source: trunk/python/scantable.py@ 1537

Last change on this file since 1537 was 1536, checked in by Malte Marquarding, 16 years ago

Fix for ticket #154: flagged data wasn't honoured in any fitting process, e.g. poly_baseline

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.0 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1435]41 if isinstance(filename, str):# or \
42# (isinstance(filename, list) or isinstance(filename, tuple)) \
43# and isinstance(filename[-1], str):
[976]44 import os.path
45 filename = os.path.expandvars(filename)
46 filename = os.path.expanduser(filename)
47 if not os.path.exists(filename):
48 s = "File '%s' not found." % (filename)
[718]49 if rcParams['verbose']:
[976]50 asaplog.push(s)
51 print asaplog.pop().strip()
[718]52 return
[976]53 raise IOError(s)
[1115]54 if os.path.isdir(filename) \
[1118]55 and not os.path.exists(filename+'/table.f1'):
[976]56 # crude check if asap table
57 if os.path.exists(filename+'/table.info'):
[1118]58 ondisk = rcParams['scantable.storage'] == 'disk'
59 Scantable.__init__(self, filename, ondisk)
[976]60 if unit is not None:
61 self.set_fluxunit(unit)
62 self.set_freqframe(rcParams['scantable.freqframe'])
[718]63 else:
[1118]64 msg = "The given file '%s' is not a valid " \
65 "asap table." % (filename)
[976]66 if rcParams['verbose']:
67 print msg
68 return
69 else:
70 raise IOError(msg)
[226]71 else:
[1118]72 self._fill([filename], unit, average)
73 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]74 and isinstance(filename[-1], str):
75 self._fill(filename, unit, average)
[1259]76 self._add_history("scantable", varlist)
[714]77 print_log()
[102]78
[876]79 def save(self, name=None, format=None, overwrite=False):
[116]80 """
[1280]81 Store the scantable on disk. This can be an asap (aips++) Table,
82 SDFITS or MS2 format.
[116]83 Parameters:
[1093]84 name: the name of the outputfile. For format "ASCII"
85 this is the root file name (data in 'name'.txt
[497]86 and header in 'name'_header.txt)
[116]87 format: an optional file format. Default is ASAP.
[280]88 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]89 'SDFITS' (save as SDFITS file)
[200]90 'ASCII' (saves as ascii text file)
[226]91 'MS2' (saves as an aips++
92 MeasurementSet V2)
[1443]93 'FITS' (save as image FITS - not
 94 readable by CLASS)
95 'CLASS' (save as FITS readable by CLASS)
[411]96 overwrite: whether to overwrite the file if it exists.
[256]97 The default (False) is to return with a warning
[411]98 without writing the output. USE WITH CARE.
[116]99 Example:
100 scan.save('myscan.asap')
[1118]101 scan.save('myscan.sdfits', 'SDFITS')
[116]102 """
[411]103 from os import path
[226]104 if format is None: format = rcParams['scantable.save']
[256]105 suffix = '.'+format.lower()
[1118]106 if name is None or name == "":
[256]107 name = 'scantable'+suffix
[718]108 msg = "No filename given. Using default name %s..." % name
109 asaplog.push(msg)
[411]110 name = path.expandvars(name)
[256]111 if path.isfile(name) or path.isdir(name):
112 if not overwrite:
[718]113 msg = "File %s exists." % name
114 if rcParams['verbose']:
115 print msg
116 return
117 else:
118 raise IOError(msg)
[451]119 format2 = format.upper()
120 if format2 == 'ASAP':
[116]121 self._save(name)
122 else:
[989]123 from asap._asap import stwriter as stw
[1118]124 writer = stw(format2)
125 writer.write(self, name)
[718]126 print_log()
[116]127 return
128
[102]129 def copy(self):
130 """
131 Return a copy of this scantable.
[1348]132 Note:
133 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]134 Parameters:
[113]135 none
[102]136 Example:
137 copiedscan = scan.copy()
138 """
[876]139 sd = scantable(Scantable._copy(self))
[113]140 return sd
141
[1093]142 def drop_scan(self, scanid=None):
143 """
144 Return a new scantable where the specified scan number(s) has(have)
145 been dropped.
146 Parameters:
 147 scanid: a (list of) scan number(s) to drop
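 Example (illustrative):
 # drop scans 2 and 3, keeping all other scans
 newscan = scan.drop_scan([2, 3])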
148 """
149 from asap import _is_sequence_or_number as _is_valid
150 from asap import _to_list
151 from asap import unique
152 if not _is_valid(scanid):
153 if rcParams['verbose']:
154 print "Please specify a scanno to drop from the scantable"
155 return
156 else:
157 raise RuntimeError("No scan given")
158 try:
159 scanid = _to_list(scanid)
160 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161 for sid in scanid: allscans.remove(sid)
[1118]162 if len(allscans) == 0:
163 raise ValueError("Can't remove all scans")
[1093]164 except ValueError:
165 if rcParams['verbose']:
166 print "Couldn't find any match."
167 return
168 else: raise
169 try:
170 bsel = self.get_selection()
171 sel = selector()
172 sel.set_scans(allscans)
173 self.set_selection(bsel+sel)
174 scopy = self._copy()
175 self.set_selection(bsel)
176 return scantable(scopy)
177 except RuntimeError:
[1118]178 if rcParams['verbose']:
179 print "Couldn't find any match."
180 else:
181 raise
[1093]182
183
[102]184 def get_scan(self, scanid=None):
185 """
186 Return a specific scan (by scanno) or collection of scans (by
187 source name) in a new scantable.
[1348]188 Note:
189 See scantable.drop_scan() for the inverse operation.
[102]190 Parameters:
[513]191 scanid: a (list of) scanno or a source name, unix-style
192 patterns are accepted for source name matching, e.g.
 193 '*_R' gets all 'ref' scans
[102]194 Example:
[513]195 # get all scans containing the source '323p459'
196 newscan = scan.get_scan('323p459')
197 # get all 'off' scans
198 refscans = scan.get_scan('*_R')
 199 # get a subset of scans by scanno (as listed in scan.summary())
[1118]200 newscan = scan.get_scan([0, 2, 7, 10])
[102]201 """
202 if scanid is None:
[718]203 if rcParams['verbose']:
[1118]204 print "Please specify a scan no or name to " \
205 "retrieve from the scantable"
[718]206 return
207 else:
208 raise RuntimeError("No scan given")
209
[102]210 try:
[946]211 bsel = self.get_selection()
212 sel = selector()
[102]213 if type(scanid) is str:
[946]214 sel.set_name(scanid)
215 self.set_selection(bsel+sel)
[876]216 scopy = self._copy()
[946]217 self.set_selection(bsel)
[876]218 return scantable(scopy)
[102]219 elif type(scanid) is int:
[946]220 sel.set_scans([scanid])
221 self.set_selection(bsel+sel)
[876]222 scopy = self._copy()
[946]223 self.set_selection(bsel)
[876]224 return scantable(scopy)
[381]225 elif type(scanid) is list:
[946]226 sel.set_scans(scanid)
227 self.set_selection(sel)
[876]228 scopy = self._copy()
[946]229 self.set_selection(bsel)
[876]230 return scantable(scopy)
[381]231 else:
[718]232 msg = "Illegal scanid type, use 'int' or 'list' of ints."
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise TypeError(msg)
[102]237 except RuntimeError:
[718]238 if rcParams['verbose']: print "Couldn't find any match."
239 else: raise
[102]240
241 def __str__(self):
[1118]242 return Scantable._summary(self, True)
[102]243
[976]244 def summary(self, filename=None):
[102]245 """
246 Print a summary of the contents of this scantable.
247 Parameters:
 248 filename: the name of a file to write the output to
249 Default - no file output
[381]250 verbose: print extra info such as the frequency table
251 The default (False) is taken from .asaprc
[102]252 """
[976]253 info = Scantable._summary(self, True)
254 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]255 if filename is not None:
[256]256 if filename == "":
257 filename = 'scantable_summary.txt'
[415]258 from os.path import expandvars, isdir
[411]259 filename = expandvars(filename)
[415]260 if not isdir(filename):
[413]261 data = open(filename, 'w')
262 data.write(info)
263 data.close()
264 else:
[718]265 msg = "Illegal file name '%s'." % (filename)
266 if rcParams['verbose']:
267 print msg
268 else:
269 raise IOError(msg)
270 if rcParams['verbose']:
[794]271 try:
272 from IPython.genutils import page as pager
273 except ImportError:
274 from pydoc import pager
275 pager(info)
[718]276 else:
277 return info
[710]278
[1512]279 def get_spectrum(self, rowno):
[1471]280 """Return the spectrum for the current row in the scantable as a list.
281 Parameters:
282 rowno: the row number to retrieve the spectrum from
283 """
284 return self._getspectrum(rowno)
[946]285
[1471]286 def get_mask(self, rowno):
287 """Return the mask for the current row in the scantable as a list.
288 Parameters:
289 rowno: the row number to retrieve the mask from
290 """
291 return self._getmask(rowno)
292
293 def set_spectrum(self, spec, rowno):
294 """Return the spectrum for the current row in the scantable as a list.
295 Parameters:
296 spec: the spectrum
297 rowno: the row number to set the spectrum for
298 """
299 assert(len(spec) == self.nchan())
300 return self._setspectrum(spec, rowno)
301
[946]302 def get_selection(self):
303 """
[1005]304 Get the selection object currently set on this scantable.
305 Parameters:
306 none
307 Example:
308 sel = scan.get_selection()
309 sel.set_ifs(0) # select IF 0
310 scan.set_selection(sel) # apply modified selection
[946]311 """
312 return selector(self._getselection())
313
[1005]314 def set_selection(self, selection=selector()):
[946]315 """
[1005]316 Select a subset of the data. All following operations on this scantable
 317 are only applied to this selection.
318 Parameters:
319 selection: a selector object (default unset the selection)
320 Examples:
321 sel = selector() # create a selection object
[1118]322 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]323 scan.set_selection(sel) # set the selection
 324 scan.summary() # will only print summary of scanno 0 and 3
325 scan.set_selection() # unset the selection
[946]326 """
327 self._setselection(selection)
328
[876]329 def stats(self, stat='stddev', mask=None):
[102]330 """
[135]331 Determine the specified statistic of the current beam/if/pol
[102]332 Takes a 'mask' as an optional parameter to specify which
333 channels should be excluded.
334 Parameters:
[135]335 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
336 'var', 'stddev', 'avdev', 'rms', 'median'
337 mask: an optional mask specifying where the statistic
[102]338 should be determined.
339 Example:
[113]340 scan.set_unit('channel')
[1118]341 msk = scan.create_mask([100, 200], [500, 600])
[135]342 scan.stats(stat='mean', mask=msk)
[102]343 """
344 if mask == None:
[876]345 mask = []
[1118]346 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]347 if not self._check_ifs():
[1118]348 raise ValueError("Cannot apply mask as the IFs have different "
349 "number of channels. Please use setselection() "
350 "to select individual IFs")
[256]351
[876]352 statvals = self._math._stats(self, mask, stat)
353 out = ''
354 axes = []
355 for i in range(self.nrow()):
356 axis = []
357 axis.append(self.getscan(i))
358 axis.append(self.getbeam(i))
359 axis.append(self.getif(i))
360 axis.append(self.getpol(i))
361 axis.append(self.getcycle(i))
362 axes.append(axis)
363 tm = self._gettime(i)
364 src = self._getsourcename(i)
365 out += 'Scan[%d] (%s) ' % (axis[0], src)
366 out += 'Time[%s]:\n' % (tm)
367 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
368 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
369 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
370 out += '= %3.3f\n' % (statvals[i])
371 out += "--------------------------------------------------\n"
[256]372
[876]373 if rcParams['verbose']:
374 print "--------------------------------------------------"
[1118]375 print " ", stat
[876]376 print "--------------------------------------------------"
377 print out
[1295]378 #else:
379 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
380 # 'axes' : axes,
381 # 'data': statvals}
382 return statvals
[102]383
[1118]384 def stddev(self, mask=None):
[135]385 """
386 Determine the standard deviation of the current beam/if/pol
387 Takes a 'mask' as an optional parameter to specify which
388 channels should be excluded.
389 Parameters:
390 mask: an optional mask specifying where the standard
391 deviation should be determined.
392
393 Example:
394 scan.set_unit('channel')
[1118]395 msk = scan.create_mask([100, 200], [500, 600])
[135]396 scan.stddev(mask=msk)
397 """
[1118]398 return self.stats(stat='stddev', mask=mask)
[135]399
[1003]400
[1259]401 def get_column_names(self):
[1003]402 """
403 Return a list of column names, which can be used for selection.
404 """
[1259]405 return list(Scantable.get_column_names(self))
[1003]406
[876]407 def get_tsys(self):
[113]408 """
409 Return the System temperatures.
410 Returns:
[876]411 a list of Tsys values for the current selection
[113]412 """
[256]413
[876]414 return self._row_callback(self._gettsys, "Tsys")
[256]415
[876]416 def _row_callback(self, callback, label):
417 axes = []
[1118]418 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]419 out = ""
[1118]420 outvec = []
[876]421 for i in range(self.nrow()):
422 axis = []
423 axis.append(self.getscan(i))
424 axis.append(self.getbeam(i))
425 axis.append(self.getif(i))
426 axis.append(self.getpol(i))
427 axis.append(self.getcycle(i))
428 axes.append(axis)
429 tm = self._gettime(i)
430 src = self._getsourcename(i)
431 out += 'Scan[%d] (%s) ' % (axis[0], src)
432 out += 'Time[%s]:\n' % (tm)
433 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
434 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
435 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
436 outvec.append(callback(i))
437 out += '= %3.3f\n' % (outvec[i])
438 out += "--------------------------------------------------\n"
439 if rcParams['verbose']:
440 print "--------------------------------------------------"
441 print " %s" % (label)
442 print "--------------------------------------------------"
443 print out
[1175]444 # disabled because the vector seems more useful
445 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
446 return outvec
[256]447
[1070]448 def _get_column(self, callback, row=-1):
449 """
450 """
451 if row == -1:
452 return [callback(i) for i in range(self.nrow())]
453 else:
454 if 0 <= row < self.nrow():
455 return callback(row)
[256]456
[1070]457
[1348]458 def get_time(self, row=-1, asdatetime=False):
[113]459 """
460 Get a list of time stamps for the observations.
[1348]461 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
[113]462 Parameters:
[1348]463 row: row no of integration. Default -1 return all rows
464 asdatetime: return values as datetime objects rather than strings
[113]465 Example:
466 none
467 """
[1175]468 from time import strptime
469 from datetime import datetime
[1392]470 times = self._get_column(self._gettime, row)
[1348]471 if not asdatetime:
[1392]472 return times
[1175]473 format = "%Y/%m/%d/%H:%M:%S"
474 if isinstance(times, list):
475 return [datetime(*strptime(i, format)[:6]) for i in times]
476 else:
477 return datetime(*strptime(times, format)[:6])
[102]478
[1348]479
480 def get_inttime(self, row=-1):
481 """
482 Get a list of integration times for the observations.
483 Return a time in seconds for each integration in the scantable.
484 Parameters:
485 row: row no of integration. Default -1 return all rows.
486 Example:
487 none
488 """
489 return self._get_column(self._getinttime, row)
490
491
[714]492 def get_sourcename(self, row=-1):
493 """
[794]494 Get a list of source names for the observations.
[714]495 Return a string for each integration in the scantable.
496 Parameters:
[1348]497 row: row no of integration. Default -1 return all rows.
[714]498 Example:
499 none
500 """
[1070]501 return self._get_column(self._getsourcename, row)
[714]502
[794]503 def get_elevation(self, row=-1):
504 """
505 Get a list of elevations for the observations.
506 Return a float for each integration in the scantable.
507 Parameters:
[1348]508 row: row no of integration. Default -1 return all rows.
[794]509 Example:
510 none
511 """
[1070]512 return self._get_column(self._getelevation, row)
[794]513
514 def get_azimuth(self, row=-1):
515 """
516 Get a list of azimuths for the observations.
517 Return a float for each integration in the scantable.
518 Parameters:
[1348]519 row: row no of integration. Default -1 return all rows.
[794]520 Example:
521 none
522 """
[1070]523 return self._get_column(self._getazimuth, row)
[794]524
525 def get_parangle(self, row=-1):
526 """
527 Get a list of parallactic angles for the observations.
528 Return a float for each integration in the scantable.
529 Parameters:
[1348]530 row: row no of integration. Default -1 return all rows.
[794]531 Example:
532 none
533 """
[1070]534 return self._get_column(self._getparangle, row)
[794]535
[1070]536 def get_direction(self, row=-1):
537 """
538 Get a list of Positions on the sky (direction) for the observations.
539 Return a float for each integration in the scantable.
540 Parameters:
541 row: row no of integration. Default -1 return all rows
542 Example:
543 none
544 """
545 return self._get_column(self._getdirection, row)
546
[1391]547 def get_directionval(self, row=-1):
548 """
549 Get a list of Positions on the sky (direction) for the observations.
550 Return a float for each integration in the scantable.
551 Parameters:
552 row: row no of integration. Default -1 return all rows
553 Example:
554 none
555 """
556 return self._get_column(self._getdirectionvec, row)
557
558
[102]559 def set_unit(self, unit='channel'):
560 """
561 Set the unit for all following operations on this scantable
562 Parameters:
563 unit: optional unit, default is 'channel'
[1118]564 one of '*Hz', 'km/s', 'channel', ''
[102]565 """
[484]566 varlist = vars()
[1118]567 if unit in ['', 'pixel', 'channel']:
[113]568 unit = ''
569 inf = list(self._getcoordinfo())
570 inf[0] = unit
571 self._setcoordinfo(inf)
[1118]572 self._add_history("set_unit", varlist)
[113]573
[484]574 def set_instrument(self, instr):
[358]575 """
[1348]576 Set the instrument for subsequent processing.
[358]577 Parameters:
[710]578 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]579 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]580 """
581 self._setInstrument(instr)
[1118]582 self._add_history("set_instrument", vars())
[718]583 print_log()
[358]584
[1190]585 def set_feedtype(self, feedtype):
586 """
587 Overwrite the feed type, which might not be set correctly.
588 Parameters:
589 feedtype: 'linear' or 'circular'
590 """
591 self._setfeedtype(feedtype)
592 self._add_history("set_feedtype", vars())
593 print_log()
594
[276]595 def set_doppler(self, doppler='RADIO'):
596 """
597 Set the doppler for all following operations on this scantable.
598 Parameters:
599 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
600 """
[484]601 varlist = vars()
[276]602 inf = list(self._getcoordinfo())
603 inf[2] = doppler
604 self._setcoordinfo(inf)
[1118]605 self._add_history("set_doppler", vars())
[718]606 print_log()
[710]607
[226]608 def set_freqframe(self, frame=None):
[113]609 """
610 Set the frame type of the Spectral Axis.
611 Parameters:
[591]612 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]613 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
614 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]615 Examples:
616 scan.set_freqframe('BARY')
617 """
[484]618 if frame is None: frame = rcParams['scantable.freqframe']
619 varlist = vars()
[1118]620 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
621 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]622
[989]623 if frame in valid:
[113]624 inf = list(self._getcoordinfo())
625 inf[1] = frame
626 self._setcoordinfo(inf)
[1118]627 self._add_history("set_freqframe", varlist)
[102]628 else:
[1118]629 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]630 if rcParams['verbose']:
631 print msg
632 else:
633 raise TypeError(msg)
634 print_log()
[710]635
[989]636 def set_dirframe(self, frame=""):
637 """
638 Set the frame type of the Direction on the sky.
639 Parameters:
640 frame: an optional frame type, default ''. Valid frames are:
641 'J2000', 'B1950', 'GALACTIC'
642 Examples:
643 scan.set_dirframe('GALACTIC')
644 """
645 varlist = vars()
646 try:
647 Scantable.set_dirframe(self, frame)
[1118]648 except RuntimeError, msg:
[989]649 if rcParams['verbose']:
650 print msg
651 else:
652 raise
[1118]653 self._add_history("set_dirframe", varlist)
[989]654
[113]655 def get_unit(self):
656 """
657 Get the default unit set in this scantable
658 Returns:
659 A unit string
660 """
661 inf = self._getcoordinfo()
662 unit = inf[0]
663 if unit == '': unit = 'channel'
664 return unit
[102]665
[158]666 def get_abcissa(self, rowno=0):
[102]667 """
[158]668 Get the abcissa in the current coordinate setup for the currently
[113]669 selected Beam/IF/Pol
670 Parameters:
[226]671 rowno: an optional row number in the scantable. Default is the
672 first row, i.e. rowno=0
[113]673 Returns:
[1348]674 The abcissa values and the axis label string (as a tuple)
[113]675 """
[256]676 abc = self._getabcissa(rowno)
[710]677 lbl = self._getabcissalabel(rowno)
[718]678 print_log()
[158]679 return abc, lbl
[113]680
[1118]681 def flag(self, mask=None):
[1001]682 """
683 Flag the selected data using an optional channel mask.
684 Parameters:
685 mask: an optional channel mask, created with create_mask. Default
686 (no mask) is all channels.
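 Example (illustrative):
 # flag using a channel mask built with create_mask
 msk = scan.create_mask([400, 500])
 scan.flag(msk)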
687 """
688 varlist = vars()
[1118]689 if mask is None:
690 mask = []
[1001]691 try:
692 self._flag(mask)
[1118]693 except RuntimeError, msg:
[1001]694 if rcParams['verbose']:
695 print msg
696 return
697 else: raise
698 self._add_history("flag", varlist)
699
[1203]700 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]701 """
702 Flag the data in 'lag' space by providing a frequency to remove.
703 Flagged data in the scantable gets set to 0.0 before the fft.
704 No taper is applied.
705 Parameters:
[1348]706 frequency: the frequency (really a period within the bandwidth)
707 to remove
708 width: the width of the frequency to remove, to remove a
[1471]709 range of frequencies around the centre.
[1203]710 unit: the frequency unit (default "GHz")
711 Notes:
[1348]712 It is recommended to flag edges of the band or strong
713 signals beforehand.
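 Example (illustrative):
 # remove a ripple with a period of ~5.5 MHz across the band,
 # returning a new scantable
 cleaned = scan.lag_flag(frequency=5.5, width=0.1, unit="MHz", insitu=False)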
[1192]714 """
715 if insitu is None: insitu = rcParams['insitu']
716 self._math._setinsitu(insitu)
717 varlist = vars()
[1370]718 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]719 if not base.has_key(unit):
720 raise ValueError("%s is not a valid unit." % unit)
721 try:
[1200]722 s = scantable(self._math._lag_flag(self, frequency*base[unit],
723 width*base[unit]))
[1192]724 except RuntimeError, msg:
725 if rcParams['verbose']:
726 print msg
727 return
728 else: raise
729 s._add_history("lag_flag", varlist)
730 print_log()
731 if insitu:
732 self._assign(s)
733 else:
734 return s
[1001]735
[1192]736
[113]737 def create_mask(self, *args, **kwargs):
738 """
[1118]739 Compute and return a mask based on [min, max] windows.
[189]740 The specified windows are to be INCLUDED, when the mask is
[113]741 applied.
[102]742 Parameters:
[1118]743 [min, max], [min2, max2], ...
[1024]744 Pairs of start/end points (inclusive) specifying the regions
[102]745 to be masked
[189]746 invert: optional argument. If specified as True,
747 return an inverted mask, i.e. the regions
748 specified are EXCLUDED
[513]749 row: create the mask using the specified row for
750 unit conversions, default is row=0
751 only necessary if frequency varies over rows.
[102]752 Example:
[113]753 scan.set_unit('channel')
754 a)
[1118]755 msk = scan.create_mask([400, 500], [800, 900])
[189]756 # masks everything outside 400 and 500
[113]757 # and 800 and 900 in the unit 'channel'
758
759 b)
[1118]760 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]761 # masks the regions between 400 and 500
[113]762 # and 800 and 900 in the unit 'channel'
[1024]763 c)
 764 # mask only channel 400
[1118]765 msk = scan.create_mask([400, 400])
[102]766 """
[513]767 row = 0
768 if kwargs.has_key("row"):
769 row = kwargs.get("row")
770 data = self._getabcissa(row)
[113]771 u = self._getcoordinfo()[0]
[718]772 if rcParams['verbose']:
[113]773 if u == "": u = "channel"
[718]774 msg = "The current mask window unit is %s" % u
[1118]775 i = self._check_ifs()
776 if not i:
[876]777 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]778 asaplog.push(msg)
[102]779 n = self.nchan()
[1295]780 msk = _n_bools(n, False)
[710]781 # test if args is a 'list' or a 'normal *args - UGLY!!!
782
[1118]783 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
784 and args or args[0]
[710]785 for window in ws:
[102]786 if (len(window) != 2 or window[0] > window[1] ):
[1118]787 raise TypeError("A window needs to be defined as [min, max]")
[102]788 for i in range(n):
[1024]789 if data[i] >= window[0] and data[i] <= window[1]:
[1295]790 msk[i] = True
[113]791 if kwargs.has_key('invert'):
792 if kwargs.get('invert'):
[1295]793 msk = mask_not(msk)
[718]794 print_log()
[102]795 return msk
[710]796
[256]797 def get_restfreqs(self):
798 """
799 Get the restfrequency(s) stored in this scantable.
800 The return value(s) are always of unit 'Hz'
801 Parameters:
802 none
803 Returns:
804 a list of doubles
805 """
806 return list(self._getrestfreqs())
[102]807
[402]808
[931]809 def set_restfreqs(self, freqs=None, unit='Hz'):
810 """
 811 Set or replace the restfrequency(s) stored in this scantable.
 812 If the 'freqs' argument holds a scalar,
813 then that rest frequency will be applied to all the selected
814 data. If the 'freqs' argument holds
815 a vector, then it MUST be of equal or smaller length than
816 the number of IFs (and the available restfrequencies will be
817 replaced by this vector). In this case, *all* data have
818 the restfrequency set per IF according
819 to the corresponding value you give in the 'freqs' vector.
[1118]820 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]821 IF 1 gets restfreq 2e9.
[1395]822 You can also specify the frequencies via a linecatalog.
[1153]823
[931]824 Parameters:
 825 freqs: list of rest frequency values or string identifiers
826 unit: unit for rest frequency (default 'Hz')
[402]827
[931]828 Example:
829 # set the given restfrequency for the whole table
830 scan.set_restfreqs(freqs=1.4e9)
[1348]831 # If the number of IFs in the data is >= 2, IF0 gets the first
[931]832 # value IF1 the second...
[1118]833 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
[931]834 #set the given restfrequency for the whole table (by name)
835 scan.set_restfreqs(freqs="OH1667")
[391]836
[931]837 Note:
 838 To do more sophisticated rest frequency setting, e.g. on a
839 source and IF basis, use scantable.set_selection() before using
840 this function.
 841 # provided your scantable is called scan
842 selection = selector()
843 selection.set_name("ORION*")
844 selection.set_ifs([1])
845 scan.set_selection(selection)
846 scan.set_restfreqs(freqs=86.6e9)
847
848 """
849 varlist = vars()
[1157]850 from asap import linecatalog
851 # simple value
[1118]852 if isinstance(freqs, int) or isinstance(freqs, float):
[1153]853 self._setrestfreqs(freqs, "",unit)
[1157]854 # list of values
[1118]855 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]856 # list values are scalars
[1118]857 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[931]858 sel = selector()
859 savesel = self._getselection()
[1322]860 iflist = self.getifnos()
[931]861 for i in xrange(len(freqs)):
[1322]862 sel.set_ifs(iflist[i])
[931]863 self._setselection(sel)
[1153]864 self._setrestfreqs(freqs[i], "",unit)
[931]865 self._setselection(savesel)
[1157]866 # list values are dicts of the form {'value': ..., 'name': ...}
867 elif isinstance(freqs[-1], dict):
868 sel = selector()
869 savesel = self._getselection()
[1322]870 iflist = self.getifnos()
[1157]871 for i in xrange(len(freqs)):
[1322]872 sel.set_ifs(iflist[i])
[1259]873 self._setselection(sel)
[1157]874 self._setrestfreqs(freqs[i]["value"],
875 freqs[i]["name"], "MHz")
876 self._setselection(savesel)
877 # freqs are to be taken from a linecatalog
[1153]878 elif isinstance(freqs, linecatalog):
879 sel = selector()
880 savesel = self._getselection()
[1322]881 iflist = self.getifnos()
[1153]882 for i in xrange(freqs.nrow()):
[1322]883 sel.set_ifs(iflist[i])
[1153]884 self._setselection(sel)
885 self._setrestfreqs(freqs.get_frequency(i),
886 freqs.get_name(i), "MHz")
887 # ensure that we are not iterating past nIF
888 if i == self.nif()-1: break
889 self._setselection(savesel)
[931]890 else:
891 return
892 self._add_history("set_restfreqs", varlist)
893
[1360]894 def shift_refpix(self, delta):
895 """
 896 Shift the reference pixel of the Spectral Coordinate by an
897 integer amount.
898 Parameters:
899 delta: the amount to shift by
900 Note:
901 Be careful using this with broadband data.
902 """
903 Scantable.shift(self, delta)
[931]904
[1259]905 def history(self, filename=None):
906 """
907 Print the history. Optionally to a file.
[1348]908 Parameters:
909 filename: The name of the file to save the history to.
[1259]910 """
[484]911 hist = list(self._gethistory())
[794]912 out = "-"*80
[484]913 for h in hist:
[489]914 if h.startswith("---"):
[794]915 out += "\n"+h
[489]916 else:
917 items = h.split("##")
918 date = items[0]
919 func = items[1]
920 items = items[2:]
[794]921 out += "\n"+date+"\n"
922 out += "Function: %s\n Parameters:" % (func)
[489]923 for i in items:
924 s = i.split("=")
[1118]925 out += "\n %s = %s" % (s[0], s[1])
[794]926 out += "\n"+"-"*80
[1259]927 if filename is not None:
 928 if filename == "":
929 filename = 'scantable_history.txt'
930 import os
931 filename = os.path.expandvars(os.path.expanduser(filename))
932 if not os.path.isdir(filename):
933 data = open(filename, 'w')
934 data.write(out)
935 data.close()
936 else:
937 msg = "Illegal file name '%s'." % (filename)
938 if rcParams['verbose']:
939 print msg
940 else:
941 raise IOError(msg)
942 if rcParams['verbose']:
943 try:
944 from IPython.genutils import page as pager
945 except ImportError:
946 from pydoc import pager
947 pager(out)
948 else:
949 return out
[484]950 return
[513]951 #
952 # Maths business
953 #
954
[931]955 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]956 """
[1070]957 Return the (time) weighted average of a scan.
[513]958 Note:
[1070]959 in channels only - align if necessary
[513]960 Parameters:
961 mask: an optional mask (only used for 'var' and 'tsys'
962 weighting)
[558]963 scanav: True averages each scan separately
964 False (default) averages all scans together,
[1099]965 weight: Weighting scheme.
966 'none' (mean no weight)
967 'var' (1/var(spec) weighted)
968 'tsys' (1/Tsys**2 weighted)
969 'tint' (integration time weighted)
970 'tintsys' (Tint/Tsys**2)
971 'median' ( median averaging)
[535]972 The default is 'tint'
[931]973 align: align the spectra in velocity before averaging. It takes
974 the time of the first spectrum as reference time.
[513]975 Example:
976 # time average the scantable without using a mask
[710]977 newscan = scan.average_time()
[513]978 """
979 varlist = vars()
[976]980 if weight is None: weight = 'TINT'
[513]981 if mask is None: mask = ()
[1099]982 if scanav: scanav = "SCAN"
983 else: scanav = "NONE"
[1118]984 scan = (self, )
[989]985 try:
[1118]986 if align:
987 scan = (self.freq_align(insitu=False), )
988 s = None
989 if weight.upper() == 'MEDIAN':
990 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
991 scanav))
992 else:
993 s = scantable(self._math._average(scan, mask, weight.upper(),
994 scanav))
995 except RuntimeError, msg:
[989]996 if rcParams['verbose']:
997 print msg
998 return
999 else: raise
[1099]1000 s._add_history("average_time", varlist)
[718]1001 print_log()
[513]1002 return s
[710]1003
[876]1004 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1005 """
1006 Return a scan where all spectra are converted to either
1007 Jansky or Kelvin depending upon the flux units of the scan table.
1008 By default the function tries to look the values up internally.
1009 If it can't find them (or if you want to over-ride), you must
1010 specify EITHER jyperk OR eta (and D which it will try to look up
1011 also if you don't set it). jyperk takes precedence if you set both.
1012 Parameters:
1013 jyperk: the Jy / K conversion factor
1014 eta: the aperture efficiency
 1015 d: the geometric diameter (metres)
1016 insitu: if False a new scantable is returned.
1017 Otherwise, the scaling is done in-situ
1018 The default is taken from .asaprc (False)
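 Example (illustrative):
 # convert using an explicit Jy/K factor, returning a new scantable
 jyscan = scan.convert_flux(jyperk=12.7, insitu=False)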
1019 """
1020 if insitu is None: insitu = rcParams['insitu']
[876]1021 self._math._setinsitu(insitu)
[513]1022 varlist = vars()
1023 if jyperk is None: jyperk = -1.0
1024 if d is None: d = -1.0
1025 if eta is None: eta = -1.0
[876]1026 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1027 s._add_history("convert_flux", varlist)
1028 print_log()
1029 if insitu: self._assign(s)
1030 else: return s
[513]1031
[876]1032 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1033 """
1034 Return a scan after applying a gain-elevation correction.
1035 The correction can be made via either a polynomial or a
1036 table-based interpolation (and extrapolation if necessary).
1037 You specify polynomial coefficients, an ascii table or neither.
1038 If you specify neither, then a polynomial correction will be made
1039 with built in coefficients known for certain telescopes (an error
1040 will occur if the instrument is not known).
1041 The data and Tsys are *divided* by the scaling factors.
1042 Parameters:
1043 poly: Polynomial coefficients (default None) to compute a
1044 gain-elevation correction as a function of
1045 elevation (in degrees).
1046 filename: The name of an ascii file holding correction factors.
1047 The first row of the ascii file must give the column
1048 names and these MUST include columns
1049 "ELEVATION" (degrees) and "FACTOR" (multiply data
1050 by this) somewhere.
1051 The second row must give the data type of the
1052 column. Use 'R' for Real and 'I' for Integer.
1053 An example file would be
1054 (actual factors are arbitrary) :
1055
1056 TIME ELEVATION FACTOR
1057 R R R
1058 0.1 0 0.8
1059 0.2 20 0.85
1060 0.3 40 0.9
1061 0.4 60 0.85
1062 0.5 80 0.8
1063 0.6 90 0.75
1064 method: Interpolation method when correcting from a table.
1065 Values are "nearest", "linear" (default), "cubic"
1066 and "spline"
1067 insitu: if False a new scantable is returned.
1068 Otherwise, the scaling is done in-situ
1069 The default is taken from .asaprc (False)
1070 """
1071
1072 if insitu is None: insitu = rcParams['insitu']
[876]1073 self._math._setinsitu(insitu)
[513]1074 varlist = vars()
1075 if poly is None:
[1118]1076 poly = ()
[513]1077 from os.path import expandvars
1078 filename = expandvars(filename)
[876]1079 s = scantable(self._math._gainel(self, poly, filename, method))
1080 s._add_history("gain_el", varlist)
1081 print_log()
1082 if insitu: self._assign(s)
1083 else: return s
[710]1084
[931]1085 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1086 """
1087 Return a scan where all rows have been aligned in frequency/velocity.
1088 The alignment frequency frame (e.g. LSRK) is that set by function
1089 set_freqframe.
1090 Parameters:
1091 reftime: reference time to align at. By default, the time of
1092 the first row of data is used.
1093 method: Interpolation method for regridding the spectra.
1094 Choose from "nearest", "linear", "cubic" (default)
1095 and "spline"
1096 insitu: if False a new scantable is returned.
1097 Otherwise, the scaling is done in-situ
1098 The default is taken from .asaprc (False)
1099 """
[931]1100 if insitu is None: insitu = rcParams["insitu"]
[876]1101 self._math._setinsitu(insitu)
[513]1102 varlist = vars()
[931]1103 if reftime is None: reftime = ""
1104 s = scantable(self._math._freq_align(self, reftime, method))
[876]1105 s._add_history("freq_align", varlist)
1106 print_log()
1107 if insitu: self._assign(s)
1108 else: return s
[513]1109
[876]1110 def opacity(self, tau, insitu=None):
[513]1111 """
1112 Apply an opacity correction. The data
1113 and Tsys are multiplied by the correction factor.
1114 Parameters:
1115 tau: Opacity from which the correction factor is
1116 exp(tau*ZD)
1117 where ZD is the zenith-distance
1118 insitu: if False a new scantable is returned.
1119 Otherwise, the scaling is done in-situ
1120 The default is taken from .asaprc (False)
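 Example (illustrative):
 # correct the data and Tsys for a zenith opacity of 0.05
 corrected = scan.opacity(0.05, insitu=False)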
1121 """
1122 if insitu is None: insitu = rcParams['insitu']
[876]1123 self._math._setinsitu(insitu)
[513]1124 varlist = vars()
[876]1125 s = scantable(self._math._opacity(self, tau))
1126 s._add_history("opacity", varlist)
1127 print_log()
1128 if insitu: self._assign(s)
1129 else: return s
[513]1130
1131 def bin(self, width=5, insitu=None):
1132 """
1133 Return a scan where all spectra have been binned up.
[1348]1134 Parameters:
[513]1135 width: The bin width (default=5) in pixels
1136 insitu: if False a new scantable is returned.
1137 Otherwise, the scaling is done in-situ
1138 The default is taken from .asaprc (False)
1139 """
1140 if insitu is None: insitu = rcParams['insitu']
[876]1141 self._math._setinsitu(insitu)
[513]1142 varlist = vars()
[876]1143 s = scantable(self._math._bin(self, width))
[1118]1144 s._add_history("bin", varlist)
[876]1145 print_log()
1146 if insitu: self._assign(s)
1147 else: return s
[513]1148
[710]1149
[513]1150 def resample(self, width=5, method='cubic', insitu=None):
1151 """
[1348]1152 Return a scan where all spectra have been resampled (regridded).
1153
1154 Parameters:
[513]1155 width: The bin width (default=5) in pixels
 1156 method: Interpolation method used for the resampling.
1157 Values are "nearest", "linear", "cubic" (default)
1158 and "spline"
1159 insitu: if False a new scantable is returned.
1160 Otherwise, the scaling is done in-situ
1161 The default is taken from .asaprc (False)
1162 """
1163 if insitu is None: insitu = rcParams['insitu']
[876]1164 self._math._setinsitu(insitu)
[513]1165 varlist = vars()
[876]1166 s = scantable(self._math._resample(self, method, width))
[1118]1167 s._add_history("resample", varlist)
[876]1168 print_log()
1169 if insitu: self._assign(s)
1170 else: return s
[513]1171
1172
[946]1173 def average_pol(self, mask=None, weight='none'):
1174 """
1175 Average the Polarisations together.
1176 Parameters:
1177 mask: An optional mask defining the region, where the
1178 averaging will be applied. The output will have all
1179 specified points masked.
1180 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1181 weighted), or 'tsys' (1/Tsys**2 weighted)
1182 """
1183 varlist = vars()
1184 if mask is None:
1185 mask = ()
[1010]1186 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1187 s._add_history("average_pol", varlist)
[946]1188 print_log()
[992]1189 return s
[513]1190
[1145]1191 def average_beam(self, mask=None, weight='none'):
1192 """
1193 Average the Beams together.
1194 Parameters:
1195 mask: An optional mask defining the region, where the
1196 averaging will be applied. The output will have all
1197 specified points masked.
1198 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1199 weighted), or 'tsys' (1/Tsys**2 weighted)
1200 """
1201 varlist = vars()
1202 if mask is None:
1203 mask = ()
1204 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1205 s._add_history("average_beam", varlist)
1206 print_log()
1207 return s
1208
[992]1209 def convert_pol(self, poltype=None):
1210 """
1211 Convert the data to a different polarisation type.
1212 Parameters:
1213 poltype: The new polarisation type. Valid types are:
1214 "linear", "stokes" and "circular"
1215 """
1216 varlist = vars()
1217 try:
1218 s = scantable(self._math._convertpol(self, poltype))
[1118]1219 except RuntimeError, msg:
[992]1220 if rcParams['verbose']:
[1118]1221 print msg
1222 return
[992]1223 else:
1224 raise
[1118]1225 s._add_history("convert_pol", varlist)
[992]1226 print_log()
1227 return s
1228
[876]1229 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1230 """
1231 Smooth the spectrum by the specified kernel (conserving flux).
1232 Parameters:
1233 kernel: The type of smoothing kernel. Select from
[1373]1234 'hanning' (default), 'gaussian', 'boxcar' and
1235 'rmedian'
[513]1236 width: The width of the kernel in pixels. For hanning this is
 1237 ignored, otherwise it defaults to 5 pixels.
1238 For 'gaussian' it is the Full Width Half
1239 Maximum. For 'boxcar' it is the full width.
[1373]1240 For 'rmedian' it is the half width.
[513]1241 insitu: if False a new scantable is returned.
1242 Otherwise, the scaling is done in-situ
1243 The default is taken from .asaprc (False)
1244 Example:
1245 none
1246 """
1247 if insitu is None: insitu = rcParams['insitu']
[876]1248 self._math._setinsitu(insitu)
[513]1249 varlist = vars()
[1118]1250 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1251 s._add_history("smooth", varlist)
1252 print_log()
1253 if insitu: self._assign(s)
1254 else: return s
[513]1255
[876]1256
[1391]1257 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1258 """
1259 Return a scan which has been baselined (all rows) by a polynomial.
1260 Parameters:
[794]1261 mask: an optional mask
1262 order: the order of the polynomial (default is 0)
[1061]1263 plot: plot the fit and the residual. In this mode each
 1264 individual fit has to be approved by typing 'y'
1265 or 'n'
[1391]1266 uselin: use linear polynomial fit
[794]1267 insitu: if False a new scantable is returned.
1268 Otherwise, the scaling is done in-situ
1269 The default is taken from .asaprc (False)
[513]1270 Example:
1271 # return a scan baselined by a third order polynomial,
1272 # not using a mask
1273 bscan = scan.poly_baseline(order=3)
[579]1274 """
[513]1275 if insitu is None: insitu = rcParams['insitu']
1276 varlist = vars()
1277 if mask is None:
[1295]1278 mask = [True for i in xrange(self.nchan(-1))]
[513]1279 from asap.asapfitter import fitter
[1217]1280 try:
1281 f = fitter()
1282 f.set_scan(self, mask)
[1391]1283 #f.set_function(poly=order)
1284 if uselin:
1285 f.set_function(lpoly=order)
1286 else:
1287 f.set_function(poly=order)
[1217]1288 s = f.auto_fit(insitu, plot=plot)
1289 s._add_history("poly_baseline", varlist)
1290 print_log()
1291 if insitu: self._assign(s)
1292 else: return s
1293 except RuntimeError:
1294 msg = "The fit failed, possibly because it didn't converge."
1295 if rcParams['verbose']:
1296 print msg
1297 return
1298 else:
1299 raise RuntimeError(msg)
[513]1300
[1217]1301
[1118]1302 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1303 threshold=3, chan_avg_limit=1, plot=False,
1304 insitu=None):
[880]1305 """
1306 Return a scan which has been baselined (all rows) by a polynomial.
1307 Spectral lines are detected first using linefinder and masked out
1308 to avoid them affecting the baseline solution.
1309
1310 Parameters:
 1311 mask: an optional mask retrieved from scantable
 1312 edge: an optional number of channels to drop at
 1313 the edge of the spectrum. If only one value is
1314 specified, the same number will be dropped from
1315 both sides of the spectrum. Default is to keep
[907]1316 all channels. Nested tuples represent individual
[976]1317 edge selection for different IFs (the number of spectral
 1318 channels may differ between IFs)
[880]1319 order: the order of the polynomial (default is 0)
1320 threshold: the threshold used by line finder. It is better to
1321 keep it large as only strong lines affect the
1322 baseline solution.
[1280]1323 chan_avg_limit:
 1324 a maximum number of consecutive spectral channels to
1325 average during the search of weak and broad lines.
1326 The default is no averaging (and no search for weak
1327 lines). If such lines can affect the fitted baseline
1328 (e.g. a high order polynomial is fitted), increase this
1329 parameter (usually values up to 8 are reasonable). Most
1330 users of this method should find the default value
1331 sufficient.
[1061]1332 plot: plot the fit and the residual. In this mode each
 1333 individual fit has to be approved by typing 'y'
1334 or 'n'
[880]1335 insitu: if False a new scantable is returned.
1336 Otherwise, the scaling is done in-situ
1337 The default is taken from .asaprc (False)
1338
1339 Example:
1340 scan2=scan.auto_poly_baseline(order=7)
1341 """
1342 if insitu is None: insitu = rcParams['insitu']
1343 varlist = vars()
1344 from asap.asapfitter import fitter
1345 from asap.asaplinefind import linefinder
1346 from asap import _is_sequence_or_number as _is_valid
1347
[976]1348 # check whether edge is set up for each IF individually
[1118]1349 individualedge = False;
1350 if len(edge) > 1:
1351 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1352 individualedge = True;
[907]1353
[1118]1354 if not _is_valid(edge, int) and not individualedge:
[909]1355 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1356 pair of integers specified as a tuple. Nested tuples are allowed \
1357 to make individual selection for different IFs."
[919]1358
[1118]1359 curedge = (0, 0)
1360 if individualedge:
1361 for edgepar in edge:
1362 if not _is_valid(edgepar, int):
1363 raise ValueError, "Each element of the 'edge' tuple has \
1364 to be a pair of integers or an integer."
[907]1365 else:
[1118]1366 curedge = edge;
[880]1367
1368 # setup fitter
1369 f = fitter()
1370 f.set_function(poly=order)
1371
1372 # setup line finder
[1118]1373 fl = linefinder()
[1268]1374 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1375
1376 if not insitu:
[1118]1377 workscan = self.copy()
[880]1378 else:
[1118]1379 workscan = self
[880]1380
[907]1381 fl.set_scan(workscan)
1382
[1118]1383 rows = range(workscan.nrow())
[880]1384 asaplog.push("Processing:")
1385 for r in rows:
[1118]1386 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1387 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1388 workscan.getpol(r), workscan.getcycle(r))
[880]1389 asaplog.push(msg, False)
[907]1390
[976]1391 # figure out edge parameter
[1118]1392 if individualedge:
 1393 if len(edge) <= workscan.getif(r):
 1394 raise RuntimeError, "Number of edge elements appears to " \
1395 "be less than the number of IFs"
1396 curedge = edge[workscan.getif(r)]
[919]1397
[976]1398 # setup line finder
[1118]1399 fl.find_lines(r, mask, curedge)
[1536]1400 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1401 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1402 f.fit()
1403 x = f.get_parameters()
[1061]1404 if plot:
1405 f.plot(residual=True)
1406 x = raw_input("Accept fit ( [y]/n ): ")
1407 if x.upper() == 'N':
1408 continue
[880]1409 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1410 if plot:
1411 f._p.unmap()
1412 f._p = None
1413 workscan._add_history("auto_poly_baseline", varlist)
[880]1414 if insitu:
1415 self._assign(workscan)
1416 else:
1417 return workscan
1418
[914]1419 def rotate_linpolphase(self, angle):
1420 """
1421 Rotate the phase of the complex polarization O=Q+iU correlation.
1422 This is always done in situ in the raw data. So if you call this
1423 function more than once then each call rotates the phase further.
1424 Parameters:
1425 angle: The angle (degrees) to rotate (add) by.
1426 Examples:
1427 scan.rotate_linpolphase(2.3)
1428 """
1429 varlist = vars()
[936]1430 self._math._rotate_linpolphase(self, angle)
[914]1431 self._add_history("rotate_linpolphase", varlist)
1432 print_log()
1433 return
[710]1434
[513]1435
[914]1436 def rotate_xyphase(self, angle):
1437 """
1438 Rotate the phase of the XY correlation. This is always done in situ
1439 in the data. So if you call this function more than once
1440 then each call rotates the phase further.
1441 Parameters:
1442 angle: The angle (degrees) to rotate (add) by.
1443 Examples:
1444 scan.rotate_xyphase(2.3)
1445 """
1446 varlist = vars()
[936]1447 self._math._rotate_xyphase(self, angle)
[914]1448 self._add_history("rotate_xyphase", varlist)
1449 print_log()
1450 return
1451
1452 def swap_linears(self):
1453 """
[1348]1454 Swap the linear polarisations XX and YY, or better the first two
1455 polarisations as this also works for ciculars.
[914]1456 """
1457 varlist = vars()
[936]1458 self._math._swap_linears(self)
[914]1459 self._add_history("swap_linears", varlist)
1460 print_log()
1461 return
1462
1463 def invert_phase(self):
1464 """
1465 Invert the phase of the complex polarisation
1466 """
1467 varlist = vars()
[936]1468 self._math._invert_phase(self)
[914]1469 self._add_history("invert_phase", varlist)
1470 print_log()
1471 return
1472
[876]1473 def add(self, offset, insitu=None):
[513]1474 """
1475 Return a scan where all spectra have the offset added
1476 Parameters:
1477 offset: the offset
1478 insitu: if False a new scantable is returned.
1479 Otherwise, the scaling is done in-situ
1480 The default is taken from .asaprc (False)
1481 """
1482 if insitu is None: insitu = rcParams['insitu']
[876]1483 self._math._setinsitu(insitu)
[513]1484 varlist = vars()
[876]1485 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1486 s._add_history("add", varlist)
[876]1487 print_log()
1488 if insitu:
1489 self._assign(s)
1490 else:
[513]1491 return s
1492
[1308]1493 def scale(self, factor, tsys=True, insitu=None):
[513]1494 """
 1495 Return a scan where all spectra are scaled by the given 'factor'
1496 Parameters:
1497 factor: the scaling factor
1498 insitu: if False a new scantable is returned.
1499 Otherwise, the scaling is done in-situ
1500 The default is taken from .asaprc (False)
1501 tsys: if True (default) then apply the operation to Tsys
1502 as well as the data
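 Example (illustrative):
 # multiply all spectra (and Tsys) by 1.25, returning a new scantable
 scaled = scan.scale(1.25, tsys=True, insitu=False)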
1503 """
1504 if insitu is None: insitu = rcParams['insitu']
[876]1505 self._math._setinsitu(insitu)
[513]1506 varlist = vars()
[876]1507 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1508 s._add_history("scale", varlist)
[876]1509 print_log()
1510 if insitu:
1511 self._assign(s)
1512 else:
[513]1513 return s
1514
[1504]1515 def set_sourcetype(self, match, matchtype="pattern",
1516 sourcetype="reference"):
[1502]1517 """
[1502]1518 Set the type of the source to be a source or reference scan
1519 using the provided pattern:
1520 Parameters:
[1504]1521 match: a Unix style pattern, regular expression or selector
1522 matchtype: 'pattern' (default) UNIX style pattern or
1523 'regex' regular expression
[1502]1524 sourcetype: the type of the source to use (source/reference)
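 Example (illustrative):
 # mark all scans whose source name ends in '_R' as reference scans
 scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")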
1525 """
1526 varlist = vars()
1527 basesel = self.get_selection()
1528 stype = -1
1529 if sourcetype.lower().startswith("r"):
1530 stype = 1
1531 elif sourcetype.lower().startswith("s"):
1532 stype = 0
[1504]1533 else:
[1502]1534 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1535 if matchtype.lower().startswith("p"):
1536 matchtype = "pattern"
1537 elif matchtype.lower().startswith("r"):
1538 matchtype = "regex"
1539 else:
1540 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1541 sel = selector()
1542 if isinstance(match, selector):
1543 sel = match
1544 else:
[1504]1545 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1546 self.set_selection(basesel+sel)
1547 self._setsourcetype(stype)
1548 self.set_selection(basesel)
[1502]1549 self._add_history("set_sourcetype", varlist)
1550
[1348]1551 def auto_quotient(self, preserve=True, mode='paired'):
[670]1552 """
 1553 This function allows you to build quotients automatically.
 1554 It assumes the observation to have the same number of
 1555 "ons" and "offs".
1556 Parameters:
[710]1557 preserve: you can preserve (default) the continuum or
1558 remove it. The equations used are
[670]1559 preserve: Output = Toff * (on/off) - Toff
[1070]1560 remove: Output = Toff * (on/off) - Ton
[1348]1561 mode: the on/off detection mode
1562 'paired' (default)
1563 identifies 'off' scans by the
1564 trailing '_R' (Mopra/Parkes) or
1565 '_e'/'_w' (Tid) and matches
1566 on/off pairs from the observing pattern
[1502]1567 'time'
1568 finds the closest off in time
[1348]1569
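 Example (illustrative):
 # quotient from an observation with matching on/off pairs
 q = scan.auto_quotient(preserve=True, mode='paired')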
[670]1570 """
[1348]1571 modes = ["time", "paired"]
[670]1572 if not mode in modes:
[876]1573 msg = "please provide valid mode. Valid modes are %s" % (modes)
1574 raise ValueError(msg)
1575 varlist = vars()
[1348]1576 s = None
1577 if mode.lower() == "paired":
1578 basesel = self.get_selection()
[1356]1579 sel = selector()+basesel
1580 sel.set_query("SRCTYPE==1")
1581 self.set_selection(sel)
[1348]1582 offs = self.copy()
1583 sel.set_query("SRCTYPE==0")
[1356]1584 self.set_selection(sel)
[1348]1585 ons = self.copy()
1586 s = scantable(self._math._quotient(ons, offs, preserve))
1587 self.set_selection(basesel)
1588 elif mode.lower() == "time":
1589 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1590 s._add_history("auto_quotient", varlist)
[876]1591 print_log()
1592 return s
[710]1593
[1145]1594 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1595 """
[1143]1596 Form a quotient using "off" beams when observing in "MX" mode.
1597 Parameters:
[1145]1598 mask: an optional mask to be used when weight == 'stddev'
[1143]1599 weight: How to average the off beams. Default is 'median'.
[1145]1600 preserve: you can preserve (default) the continuum or
1601 remove it. The equations used are
1602 preserve: Output = Toff * (on/off) - Toff
1603 remove: Output = Toff * (on/off) - Ton
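 Example (illustrative):
 q = scan.mx_quotient(weight='median', preserve=True)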
[1217]1604 """
[1143]1605 if mask is None: mask = ()
[1141]1606 varlist = vars()
1607 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1608 preoff = scantable(self._math._mx_extract(self, 'off'))
1609 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1610 from asapmath import quotient
[1145]1611 q = quotient(on, off, preserve)
[1143]1612 q._add_history("mx_quotient", varlist)
[1145]1613 print_log()
[1217]1614 return q
[513]1615
[718]1616 def freq_switch(self, insitu=None):
1617 """
1618 Apply frequency switching to the data.
1619 Parameters:
1620 insitu: if False a new scantable is returned.
 1621 Otherwise, the switching is done in-situ
1622 The default is taken from .asaprc (False)
1623 Example:
1624 none
1625 """
1626 if insitu is None: insitu = rcParams['insitu']
[876]1627 self._math._setinsitu(insitu)
[718]1628 varlist = vars()
[876]1629 s = scantable(self._math._freqswitch(self))
[1118]1630 s._add_history("freq_switch", varlist)
[876]1631 print_log()
1632 if insitu: self._assign(s)
1633 else: return s
[718]1634
[780]1635 def recalc_azel(self):
1636 """
1637 Recalculate the azimuth and elevation for each position.
1638 Parameters:
1639 none
1640 Example:
1641 """
1642 varlist = vars()
[876]1643 self._recalcazel()
[780]1644 self._add_history("recalc_azel", varlist)
1645 print_log()
1646 return
1647
[513]1648 def __add__(self, other):
1649 varlist = vars()
1650 s = None
1651 if isinstance(other, scantable):
[1308]1652 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1653 elif isinstance(other, float):
[876]1654 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1655 else:
[718]1656 raise TypeError("Other input is not a scantable or float value")
[513]1657 s._add_history("operator +", varlist)
[718]1658 print_log()
[513]1659 return s
1660
1661 def __sub__(self, other):
1662 """
1663 implicit on all axes and on Tsys
1664 """
1665 varlist = vars()
1666 s = None
1667 if isinstance(other, scantable):
[1308]1668 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1669 elif isinstance(other, float):
[876]1670 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1671 else:
[718]1672 raise TypeError("Other input is not a scantable or float value")
[513]1673 s._add_history("operator -", varlist)
[718]1674 print_log()
[513]1675 return s
[710]1676
[513]1677 def __mul__(self, other):
1678 """
1679 implicit on all axes and on Tsys
1680 """
1681 varlist = vars()
1682 s = None
1683 if isinstance(other, scantable):
[1308]1684 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1685 elif isinstance(other, float):
[876]1686 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1687 else:
[718]1688 raise TypeError("Other input is not a scantable or float value")
[513]1689 s._add_history("operator *", varlist)
[718]1690 print_log()
[513]1691 return s
1692
[710]1693
[513]1694 def __div__(self, other):
1695 """
1696 implicit on all axes and on Tsys
1697 """
1698 varlist = vars()
1699 s = None
1700 if isinstance(other, scantable):
[1308]1701 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1702 elif isinstance(other, float):
1703 if other == 0.0:
[718]1704 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1705 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1706 else:
[718]1707 raise TypeError("Other input is not a scantable or float value")
[513]1708 s._add_history("operator /", varlist)
[718]1709 print_log()
[513]1710 return s
1711
[530]1712 def get_fit(self, row=0):
1713 """
1714 Print or return the stored fits for a row in the scantable
1715 Parameters:
1716 row: the row which the fit has been applied to.
1717 """
1718 if row > self.nrow():
1719 return
[976]1720 from asap.asapfit import asapfit
[530]1721 fit = asapfit(self._getfit(row))
[718]1722 if rcParams['verbose']:
[530]1723 print fit
1724 return
1725 else:
1726 return fit.as_dict()
1727
[1483]1728 def flag_nans(self):
1729 """
1730 Utility function to flag NaN values in the scantable.
1731 """
1732 import numpy
1733 basesel = self.get_selection()
1734 for i in range(self.nrow()):
1735 sel = selector()+basesel
1736 sel.set_scans(self.getscan(i))
1737 sel.set_beams(self.getbeam(i))
1738 sel.set_ifs(self.getif(i))
1739 sel.set_polarisations(self.getpol(i))
1740 self.set_selection(sel)
1741 nans = numpy.isnan(self._getspectrum(0))
1742 if numpy.any(nans):
1743 bnans = [ bool(v) for v in nans]
1744 self.flag(bnans)
1745 self.set_selection(basesel)
1746
1747
[484]1748 def _add_history(self, funcname, parameters):
[1435]1749 if not rcParams['scantable.history']:
1750 return
[484]1751 # create date
1752 sep = "##"
1753 from datetime import datetime
1754 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1755 hist = dstr+sep
1756 hist += funcname+sep#cdate+sep
1757 if parameters.has_key('self'): del parameters['self']
[1118]1758 for k, v in parameters.iteritems():
[484]1759 if type(v) is dict:
[1118]1760 for k2, v2 in v.iteritems():
[484]1761 hist += k2
1762 hist += "="
[1118]1763 if isinstance(v2, scantable):
[484]1764 hist += 'scantable'
1765 elif k2 == 'mask':
[1118]1766 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1767 hist += str(self._zip_mask(v2))
1768 else:
1769 hist += str(v2)
[484]1770 else:
[513]1771 hist += str(v2)
[484]1772 else:
1773 hist += k
1774 hist += "="
[1118]1775 if isinstance(v, scantable):
[484]1776 hist += 'scantable'
1777 elif k == 'mask':
[1118]1778 if isinstance(v, list) or isinstance(v, tuple):
[513]1779 hist += str(self._zip_mask(v))
1780 else:
1781 hist += str(v)
[484]1782 else:
1783 hist += str(v)
1784 hist += sep
1785 hist = hist[:-2] # remove trailing '##'
1786 self._addhistory(hist)
1787
[710]1788
[484]1789 def _zip_mask(self, mask):
1790 mask = list(mask)
1791 i = 0
1792 segments = []
1793 while mask[i:].count(1):
1794 i += mask[i:].index(1)
1795 if mask[i:].count(0):
1796 j = i + mask[i:].index(0)
1797 else:
[710]1798 j = len(mask)
[1118]1799 segments.append([i, j])
[710]1800 i = j
[484]1801 return segments
[714]1802
[626]1803 def _get_ordinate_label(self):
1804 fu = "("+self.get_fluxunit()+")"
1805 import re
1806 lbl = "Intensity"
[1118]1807 if re.match(".K.", fu):
[626]1808 lbl = "Brightness Temperature "+ fu
[1118]1809 elif re.match(".Jy.", fu):
[626]1810 lbl = "Flux density "+ fu
1811 return lbl
[710]1812
[876]1813 def _check_ifs(self):
1814 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1815 nchans = filter(lambda t: t > 0, nchans)
[876]1816 return (sum(nchans)/len(nchans) == nchans[0])
[976]1817
1818 def _fill(self, names, unit, average):
1819 import os
1820 from asap._asap import stfiller
1821 first = True
1822 fullnames = []
1823 for name in names:
1824 name = os.path.expandvars(name)
1825 name = os.path.expanduser(name)
1826 if not os.path.exists(name):
1827 msg = "File '%s' does not exists" % (name)
1828 if rcParams['verbose']:
1829 asaplog.push(msg)
1830 print asaplog.pop().strip()
1831 return
1832 raise IOError(msg)
1833 fullnames.append(name)
1834 if average:
1835 asaplog.push('Auto averaging integrations')
[1079]1836 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1837 for name in fullnames:
[1073]1838 tbl = Scantable(stype)
1839 r = stfiller(tbl)
[1504]1840 rx = rcParams['scantable.reference']
1841 r._setreferenceexpr(rx)
[976]1842 msg = "Importing %s..." % (name)
[1118]1843 asaplog.push(msg, False)
[976]1844 print_log()
[1118]1845 r._open(name, -1, -1)
[976]1846 r._read()
1847 if average:
[1118]1848 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1849 if not first:
1850 tbl = self._math._merge([self, tbl])
1851 Scantable.__init__(self, tbl)
1852 r._close()
[1118]1853 del r, tbl
[976]1854 first = False
1855 if unit is not None:
1856 self.set_fluxunit(unit)
1857 self.set_freqframe(rcParams['scantable.freqframe'])
1858
[1402]1859 def __getitem__(self, key):
1860 if key < 0:
1861 key += self.nrow()
1862 if key >= self.nrow():
1863 raise IndexError("Row index out of range.")
1864 return self._getspectrum(key)
1865
1866 def __setitem__(self, key, value):
1867 if key < 0:
1868 key += self.nrow()
1869 if key >= self.nrow():
1870 raise IndexError("Row index out of range.")
1871 if not hasattr(value, "__len__") or \
1872 len(value) > self.nchan(self.getif(key)):
1873 raise ValueError("Spectrum length doesn't match.")
1874 return self._setspectrum(value, key)
1875
1876 def __len__(self):
1877 return self.nrow()
1878
1879 def __iter__(self):
1880 for i in range(len(self)):
1881 yield self[i]