source: trunk/python/scantable.py@ 1711

Last change on this file since 1711 was 1697, checked in by Malte Marquarding, 15 years ago

Some minor tidy up and documentation

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.6 KB
RevLine 
[1697]1import os
[1691]2try:
3 from functools import wraps as wraps_dec
4except ImportError:
5 from asap.compatibility import wraps as wraps_dec
6
[876]7from asap._asap import Scantable
[226]8from asap import rcParams
[1589]9from asap import print_log, print_log_dec
[1118]10from asap import asaplog
[946]11from asap import selector
[1153]12from asap import linecatalog
[1600]13from asap.coordinate import coordinate
[1295]14from asap import _n_bools, mask_not, mask_and, mask_or
[102]15
[1689]16
def preserve_selection(func):
    """Decorator that saves the scantable's selection before calling
    'func' and restores it afterwards.

    The restore is done in a 'finally' clause so that the original
    selection is re-applied even if 'func' raises; previously an
    exception left the modified selection active on the scantable.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        basesel = obj.get_selection()
        try:
            return func(obj, *args, **kw)
        finally:
            # always restore, even on error
            obj.set_selection(basesel)
    return wrap
25
26
def is_scantable(filename):
    """Return True if 'filename' looks like an ASAP scantable on disk.

    A scantable is a directory containing a 'table.info' file but no
    'table.f1' file (the latter marks a different casa table layout).
    """
    if not os.path.isdir(filename):
        return False
    has_info = os.path.exists(os.path.join(filename, 'table.info'))
    has_f1 = os.path.exists(os.path.join(filename, 'table.f1'))
    return has_info and not has_f1
31
32
[876]33class scantable(Scantable):
[102]34 """
35 The ASAP container for scans
36 """
[1589]37 @print_log_dec
[1586]38 def __init__(self, filename, average=None, unit=None, parallactify=None):
[102]39 """
40 Create a scantable from a saved one or make a reference
41 Parameters:
[181]42 filename: the name of an asap table on disk
43 or
44 the name of a rpfits/sdfits/ms file
45 (integrations within scans are auto averaged
46 and the whole file is read)
47 or
48 [advanced] a reference to an existing
[102]49 scantable
[484]50 average: average all integrations withinb a scan on read.
51 The default (True) is taken from .asaprc.
52 unit: brightness unit; must be consistent with K or Jy.
[340]53 Over-rides the default selected by the reader
54 (input rpfits/sdfits/ms) or replaces the value
55 in existing scantables
[1586]56 parallactify: Indcicate that the data had been parallatified.
57 Default is taken form rc file.
[710]58 """
[976]59 if average is None:
[710]60 average = rcParams['scantable.autoaverage']
[1593]61 parallactify = parallactify or rcParams['scantable.parallactify']
[1259]62 varlist = vars()
[876]63 from asap._asap import stmath
64 self._math = stmath()
65 if isinstance(filename, Scantable):
66 Scantable.__init__(self, filename)
[181]67 else:
[1697]68 if isinstance(filename, str):
[976]69 filename = os.path.expandvars(filename)
70 filename = os.path.expanduser(filename)
71 if not os.path.exists(filename):
72 s = "File '%s' not found." % (filename)
[718]73 if rcParams['verbose']:
[976]74 asaplog.push(s)
[718]75 return
[976]76 raise IOError(s)
[1697]77 if is_scantable(filename):
78 ondisk = rcParams['scantable.storage'] == 'disk'
79 Scantable.__init__(self, filename, ondisk)
80 if unit is not None:
81 self.set_fluxunit(unit)
82 self.set_freqframe(rcParams['scantable.freqframe'])
[226]83 else:
[1118]84 self._fill([filename], unit, average)
85 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]86 and isinstance(filename[-1], str):
87 self._fill(filename, unit, average)
[1586]88 self.parallactify(parallactify)
[1259]89 self._add_history("scantable", varlist)
[102]90
[1589]91 @print_log_dec
[876]92 def save(self, name=None, format=None, overwrite=False):
[116]93 """
[1280]94 Store the scantable on disk. This can be an asap (aips++) Table,
95 SDFITS or MS2 format.
[116]96 Parameters:
[1093]97 name: the name of the outputfile. For format "ASCII"
98 this is the root file name (data in 'name'.txt
[497]99 and header in 'name'_header.txt)
[116]100 format: an optional file format. Default is ASAP.
[280]101 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]102 'SDFITS' (save as SDFITS file)
[200]103 'ASCII' (saves as ascii text file)
[226]104 'MS2' (saves as an aips++
105 MeasurementSet V2)
[1573]106 'FITS' (save as image FITS - not
[1443]107 readable by class)
108 'CLASS' (save as FITS readable by CLASS)
[411]109 overwrite: If the file should be overwritten if it exists.
[256]110 The default False is to return with warning
[411]111 without writing the output. USE WITH CARE.
[116]112 Example:
113 scan.save('myscan.asap')
[1118]114 scan.save('myscan.sdfits', 'SDFITS')
[116]115 """
[411]116 from os import path
[1593]117 format = format or rcParams['scantable.save']
[256]118 suffix = '.'+format.lower()
[1118]119 if name is None or name == "":
[256]120 name = 'scantable'+suffix
[718]121 msg = "No filename given. Using default name %s..." % name
122 asaplog.push(msg)
[411]123 name = path.expandvars(name)
[256]124 if path.isfile(name) or path.isdir(name):
125 if not overwrite:
[718]126 msg = "File %s exists." % name
127 if rcParams['verbose']:
128 print msg
129 return
130 else:
131 raise IOError(msg)
[451]132 format2 = format.upper()
133 if format2 == 'ASAP':
[116]134 self._save(name)
135 else:
[989]136 from asap._asap import stwriter as stw
[1118]137 writer = stw(format2)
138 writer.write(self, name)
[116]139 return
140
[102]141 def copy(self):
142 """
143 Return a copy of this scantable.
[1348]144 Note:
145 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]146 Parameters:
[113]147 none
[102]148 Example:
149 copiedscan = scan.copy()
150 """
[876]151 sd = scantable(Scantable._copy(self))
[113]152 return sd
153
[1093]154 def drop_scan(self, scanid=None):
155 """
156 Return a new scantable where the specified scan number(s) has(have)
157 been dropped.
158 Parameters:
159 scanid: a (list of) scan number(s)
160 """
161 from asap import _is_sequence_or_number as _is_valid
162 from asap import _to_list
163 from asap import unique
164 if not _is_valid(scanid):
165 if rcParams['verbose']:
166 print "Please specify a scanno to drop from the scantable"
167 return
168 else:
169 raise RuntimeError("No scan given")
170 try:
171 scanid = _to_list(scanid)
172 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
173 for sid in scanid: allscans.remove(sid)
[1118]174 if len(allscans) == 0:
175 raise ValueError("Can't remove all scans")
[1093]176 except ValueError:
177 if rcParams['verbose']:
178 print "Couldn't find any match."
179 return
180 else: raise
181 try:
[1593]182 sel = selector(scans=allscans)
[1594]183 return self._select_copy(sel)
[1093]184 except RuntimeError:
[1118]185 if rcParams['verbose']:
186 print "Couldn't find any match."
187 else:
188 raise
[1093]189
[1594]190 def _select_copy(self, selection):
191 orig = self.get_selection()
192 self.set_selection(orig+selection)
193 cp = self.copy()
194 self.set_selection(orig)
195 return cp
196
[102]197 def get_scan(self, scanid=None):
198 """
199 Return a specific scan (by scanno) or collection of scans (by
200 source name) in a new scantable.
[1348]201 Note:
202 See scantable.drop_scan() for the inverse operation.
[102]203 Parameters:
[513]204 scanid: a (list of) scanno or a source name, unix-style
205 patterns are accepted for source name matching, e.g.
206 '*_R' gets all 'ref scans
[102]207 Example:
[513]208 # get all scans containing the source '323p459'
209 newscan = scan.get_scan('323p459')
210 # get all 'off' scans
211 refscans = scan.get_scan('*_R')
212 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]213 newscan = scan.get_scan([0, 2, 7, 10])
[102]214 """
215 if scanid is None:
[718]216 if rcParams['verbose']:
[1118]217 print "Please specify a scan no or name to " \
218 "retrieve from the scantable"
[718]219 return
220 else:
221 raise RuntimeError("No scan given")
222
[102]223 try:
[946]224 bsel = self.get_selection()
225 sel = selector()
[102]226 if type(scanid) is str:
[946]227 sel.set_name(scanid)
[1594]228 return self._select_copy(sel)
[102]229 elif type(scanid) is int:
[946]230 sel.set_scans([scanid])
[1594]231 return self._select_copy(sel)
[381]232 elif type(scanid) is list:
[946]233 sel.set_scans(scanid)
[1594]234 return self._select_copy(sel)
[381]235 else:
[718]236 msg = "Illegal scanid type, use 'int' or 'list' if ints."
237 if rcParams['verbose']:
238 print msg
239 else:
240 raise TypeError(msg)
[102]241 except RuntimeError:
[718]242 if rcParams['verbose']: print "Couldn't find any match."
243 else: raise
[102]244
245 def __str__(self):
[1118]246 return Scantable._summary(self, True)
[102]247
[976]248 def summary(self, filename=None):
[102]249 """
250 Print a summary of the contents of this scantable.
251 Parameters:
252 filename: the name of a file to write the putput to
253 Default - no file output
254 """
[976]255 info = Scantable._summary(self, True)
[102]256 if filename is not None:
[256]257 if filename is "":
258 filename = 'scantable_summary.txt'
[415]259 from os.path import expandvars, isdir
[411]260 filename = expandvars(filename)
[415]261 if not isdir(filename):
[413]262 data = open(filename, 'w')
263 data.write(info)
264 data.close()
265 else:
[718]266 msg = "Illegal file name '%s'." % (filename)
267 if rcParams['verbose']:
268 print msg
269 else:
270 raise IOError(msg)
271 if rcParams['verbose']:
[794]272 try:
273 from IPython.genutils import page as pager
274 except ImportError:
275 from pydoc import pager
276 pager(info)
[718]277 else:
278 return info
[710]279
[1512]280 def get_spectrum(self, rowno):
[1471]281 """Return the spectrum for the current row in the scantable as a list.
282 Parameters:
[1573]283 rowno: the row number to retrieve the spectrum from
[1471]284 """
285 return self._getspectrum(rowno)
[946]286
[1471]287 def get_mask(self, rowno):
288 """Return the mask for the current row in the scantable as a list.
289 Parameters:
[1573]290 rowno: the row number to retrieve the mask from
[1471]291 """
292 return self._getmask(rowno)
293
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for row 'rowno' in the scantable.
        Parameters:
             spec:    the new spectrum; its length must equal the current
                      number of channels (see nchan())
             rowno:   the row number to set the spectrum for
        """
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
302
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.
        NOTE:
            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.
              c = myscan.get_coordinate(0)
              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:
             rowno:    the row number for the spectral coordinate

        """
        return coordinate(Scantable.get_coordinate(self, rowno))
321
[946]322 def get_selection(self):
323 """
[1005]324 Get the selection object currently set on this scantable.
325 Parameters:
326 none
327 Example:
328 sel = scan.get_selection()
329 sel.set_ifs(0) # select IF 0
330 scan.set_selection(sel) # apply modified selection
[946]331 """
332 return selector(self._getselection())
333
[1576]334 def set_selection(self, selection=None, **kw):
[946]335 """
[1005]336 Select a subset of the data. All following operations on this scantable
337 are only applied to thi selection.
338 Parameters:
[1697]339 selection: a selector object (default unset the selection),
340
341 or
342
343 any combination of
344 "pols", "ifs", "beams", "scans", "cycles", "name", "query"
345
[1005]346 Examples:
347 sel = selector() # create a selection object
[1118]348 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]349 scan.set_selection(sel) # set the selection
350 scan.summary() # will only print summary of scanno 0 an 3
351 scan.set_selection() # unset the selection
[1697]352 # or the equivalent
353 scan.set_selection(scans=[0,3])
354 scan.summary() # will only print summary of scanno 0 an 3
355 scan.set_selection() # unset the selection
[946]356 """
[1576]357 if selection is None:
358 # reset
359 if len(kw) == 0:
360 selection = selector()
361 else:
362 # try keywords
363 for k in kw:
364 if k not in selector.fields:
365 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
366 selection = selector(**kw)
[946]367 self._setselection(selection)
368
[876]369 def stats(self, stat='stddev', mask=None):
[102]370 """
[135]371 Determine the specified statistic of the current beam/if/pol
[102]372 Takes a 'mask' as an optional parameter to specify which
373 channels should be excluded.
374 Parameters:
[135]375 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
376 'var', 'stddev', 'avdev', 'rms', 'median'
377 mask: an optional mask specifying where the statistic
[102]378 should be determined.
379 Example:
[113]380 scan.set_unit('channel')
[1118]381 msk = scan.create_mask([100, 200], [500, 600])
[135]382 scan.stats(stat='mean', mask=m)
[102]383 """
[1593]384 mask = mask or []
[876]385 if not self._check_ifs():
[1118]386 raise ValueError("Cannot apply mask as the IFs have different "
387 "number of channels. Please use setselection() "
388 "to select individual IFs")
[256]389
[876]390 statvals = self._math._stats(self, mask, stat)
[1590]391 def cb(i):
392 return statvals[i]
[256]393
[1590]394 return self._row_callback(cb, stat)
[102]395
[1118]396 def stddev(self, mask=None):
[135]397 """
398 Determine the standard deviation of the current beam/if/pol
399 Takes a 'mask' as an optional parameter to specify which
400 channels should be excluded.
401 Parameters:
402 mask: an optional mask specifying where the standard
403 deviation should be determined.
404
405 Example:
406 scan.set_unit('channel')
[1118]407 msk = scan.create_mask([100, 200], [500, 600])
[135]408 scan.stddev(mask=m)
409 """
[1118]410 return self.stats(stat='stddev', mask=mask);
[135]411
[1003]412
[1259]413 def get_column_names(self):
[1003]414 """
415 Return a list of column names, which can be used for selection.
416 """
[1259]417 return list(Scantable.get_column_names(self))
[1003]418
[876]419 def get_tsys(self):
[113]420 """
421 Return the System temperatures.
422 Returns:
[876]423 a list of Tsys values for the current selection
[113]424 """
[256]425
[876]426 return self._row_callback(self._gettsys, "Tsys")
[256]427
[876]428 def _row_callback(self, callback, label):
429 out = ""
[1118]430 outvec = []
[1590]431 sep = '-'*50
[876]432 for i in range(self.nrow()):
433 tm = self._gettime(i)
434 src = self._getsourcename(i)
[1590]435 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]436 out += 'Time[%s]:\n' % (tm)
[1590]437 if self.nbeam(-1) > 1:
438 out += ' Beam[%d] ' % (self.getbeam(i))
439 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
440 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
[876]441 outvec.append(callback(i))
442 out += '= %3.3f\n' % (outvec[i])
[1590]443 out += sep+'\n'
[876]444 if rcParams['verbose']:
[1590]445 print sep
[876]446 print " %s" % (label)
[1590]447 print sep
[876]448 print out
[1175]449 return outvec
[256]450
[1070]451 def _get_column(self, callback, row=-1):
452 """
453 """
454 if row == -1:
455 return [callback(i) for i in range(self.nrow())]
456 else:
457 if 0 <= row < self.nrow():
458 return callback(row)
[256]459
[1070]460
[1348]461 def get_time(self, row=-1, asdatetime=False):
[113]462 """
463 Get a list of time stamps for the observations.
[1348]464 Return a datetime object for each integration time stamp in the scantable.
[113]465 Parameters:
[1348]466 row: row no of integration. Default -1 return all rows
467 asdatetime: return values as datetime objects rather than strings
[113]468 Example:
469 none
470 """
[1175]471 from time import strptime
472 from datetime import datetime
[1392]473 times = self._get_column(self._gettime, row)
[1348]474 if not asdatetime:
[1392]475 return times
[1175]476 format = "%Y/%m/%d/%H:%M:%S"
477 if isinstance(times, list):
478 return [datetime(*strptime(i, format)[:6]) for i in times]
479 else:
480 return datetime(*strptime(times, format)[:6])
[102]481
[1348]482
483 def get_inttime(self, row=-1):
484 """
485 Get a list of integration times for the observations.
486 Return a time in seconds for each integration in the scantable.
487 Parameters:
488 row: row no of integration. Default -1 return all rows.
489 Example:
490 none
491 """
[1573]492 return self._get_column(self._getinttime, row)
[1348]493
[1573]494
[714]495 def get_sourcename(self, row=-1):
496 """
[794]497 Get a list source names for the observations.
[714]498 Return a string for each integration in the scantable.
499 Parameters:
[1348]500 row: row no of integration. Default -1 return all rows.
[714]501 Example:
502 none
503 """
[1070]504 return self._get_column(self._getsourcename, row)
[714]505
[794]506 def get_elevation(self, row=-1):
507 """
508 Get a list of elevations for the observations.
509 Return a float for each integration in the scantable.
510 Parameters:
[1348]511 row: row no of integration. Default -1 return all rows.
[794]512 Example:
513 none
514 """
[1070]515 return self._get_column(self._getelevation, row)
[794]516
517 def get_azimuth(self, row=-1):
518 """
519 Get a list of azimuths for the observations.
520 Return a float for each integration in the scantable.
521 Parameters:
[1348]522 row: row no of integration. Default -1 return all rows.
[794]523 Example:
524 none
525 """
[1070]526 return self._get_column(self._getazimuth, row)
[794]527
528 def get_parangle(self, row=-1):
529 """
530 Get a list of parallactic angles for the observations.
531 Return a float for each integration in the scantable.
532 Parameters:
[1348]533 row: row no of integration. Default -1 return all rows.
[794]534 Example:
535 none
536 """
[1070]537 return self._get_column(self._getparangle, row)
[794]538
[1070]539 def get_direction(self, row=-1):
540 """
541 Get a list of Positions on the sky (direction) for the observations.
[1594]542 Return a string for each integration in the scantable.
[1070]543 Parameters:
544 row: row no of integration. Default -1 return all rows
545 Example:
546 none
547 """
548 return self._get_column(self._getdirection, row)
549
[1391]550 def get_directionval(self, row=-1):
551 """
552 Get a list of Positions on the sky (direction) for the observations.
553 Return a float for each integration in the scantable.
554 Parameters:
555 row: row no of integration. Default -1 return all rows
556 Example:
557 none
558 """
559 return self._get_column(self._getdirectionvec, row)
560
561
[102]562 def set_unit(self, unit='channel'):
563 """
564 Set the unit for all following operations on this scantable
565 Parameters:
566 unit: optional unit, default is 'channel'
[1118]567 one of '*Hz', 'km/s', 'channel', ''
[102]568 """
[484]569 varlist = vars()
[1118]570 if unit in ['', 'pixel', 'channel']:
[113]571 unit = ''
572 inf = list(self._getcoordinfo())
573 inf[0] = unit
574 self._setcoordinfo(inf)
[1118]575 self._add_history("set_unit", varlist)
[113]576
[1589]577 @print_log_dec
[484]578 def set_instrument(self, instr):
[358]579 """
[1348]580 Set the instrument for subsequent processing.
[358]581 Parameters:
[710]582 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]583 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]584 """
585 self._setInstrument(instr)
[1118]586 self._add_history("set_instument", vars())
[358]587
[1589]588 @print_log_dec
[1190]589 def set_feedtype(self, feedtype):
590 """
591 Overwrite the feed type, which might not be set correctly.
592 Parameters:
593 feedtype: 'linear' or 'circular'
594 """
595 self._setfeedtype(feedtype)
596 self._add_history("set_feedtype", vars())
597
[1589]598 @print_log_dec
[276]599 def set_doppler(self, doppler='RADIO'):
600 """
601 Set the doppler for all following operations on this scantable.
602 Parameters:
603 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
604 """
[484]605 varlist = vars()
[276]606 inf = list(self._getcoordinfo())
607 inf[2] = doppler
608 self._setcoordinfo(inf)
[1118]609 self._add_history("set_doppler", vars())
[710]610
[1589]611 @print_log_dec
[226]612 def set_freqframe(self, frame=None):
[113]613 """
614 Set the frame type of the Spectral Axis.
615 Parameters:
[591]616 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]617 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
618 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]619 Examples:
620 scan.set_freqframe('BARY')
621 """
[1593]622 frame = frame or rcParams['scantable.freqframe']
[484]623 varlist = vars()
[1118]624 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
625 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]626
[989]627 if frame in valid:
[113]628 inf = list(self._getcoordinfo())
629 inf[1] = frame
630 self._setcoordinfo(inf)
[1118]631 self._add_history("set_freqframe", varlist)
[102]632 else:
[1118]633 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]634 if rcParams['verbose']:
635 print msg
636 else:
637 raise TypeError(msg)
[710]638
    def set_dirframe(self, frame=""):
        """
        Set the frame type of the Direction on the sky.
        Parameters:
            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'
        Examples:
            scan.set_dirframe('GALACTIC')
        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            # in verbose mode errors are reported rather than raised
            if rcParams['verbose']:
                print msg
            else:
                raise
        self._add_history("set_dirframe", varlist)
[989]657
[113]658 def get_unit(self):
659 """
660 Get the default unit set in this scantable
661 Returns:
662 A unit string
663 """
664 inf = self._getcoordinfo()
665 unit = inf[0]
666 if unit == '': unit = 'channel'
667 return unit
[102]668
[158]669 def get_abcissa(self, rowno=0):
[102]670 """
[158]671 Get the abcissa in the current coordinate setup for the currently
[113]672 selected Beam/IF/Pol
673 Parameters:
[226]674 rowno: an optional row number in the scantable. Default is the
675 first row, i.e. rowno=0
[113]676 Returns:
[1348]677 The abcissa values and the format string (as a dictionary)
[113]678 """
[256]679 abc = self._getabcissa(rowno)
[710]680 lbl = self._getabcissalabel(rowno)
[158]681 return abc, lbl
[113]682
    def flag(self, mask=None):
        """
        Flag the selected data using an optional channel mask.
        Parameters:
            mask:   an optional channel mask, created with create_mask. Default
                    (no mask) is all channels.
        """
        varlist = vars()
        # an empty list means 'flag everything in the current selection'
        mask = mask or []
        try:
            self._flag(mask)
        except RuntimeError, msg:
            # in verbose mode report the error instead of raising
            if rcParams['verbose']:
                print msg
                return
            else: raise
        self._add_history("flag", varlist)
700
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.
        Parameters:
            start:    the start frequency (really a period within the
                      bandwidth) or period to remove
            end:      the end frequency or period to remove
            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels
            insitu:   if False a new scantable is returned.
                      Otherwise, the flagging is done in-situ.
                      The default is taken from .asaprc (insitu)
        Notes:
            It is recommended to flag edges of the band or strong
            signals beforehand.
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # conversion factors from the supported units to Hz
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                # no unit -> interpret start/end as raw lag channels
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[1001]739
[1589]740 @print_log_dec
[113]741 def create_mask(self, *args, **kwargs):
742 """
[1118]743 Compute and return a mask based on [min, max] windows.
[189]744 The specified windows are to be INCLUDED, when the mask is
[113]745 applied.
[102]746 Parameters:
[1118]747 [min, max], [min2, max2], ...
[1024]748 Pairs of start/end points (inclusive)specifying the regions
[102]749 to be masked
[189]750 invert: optional argument. If specified as True,
751 return an inverted mask, i.e. the regions
752 specified are EXCLUDED
[513]753 row: create the mask using the specified row for
754 unit conversions, default is row=0
755 only necessary if frequency varies over rows.
[102]756 Example:
[113]757 scan.set_unit('channel')
758 a)
[1118]759 msk = scan.create_mask([400, 500], [800, 900])
[189]760 # masks everything outside 400 and 500
[113]761 # and 800 and 900 in the unit 'channel'
762
763 b)
[1118]764 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]765 # masks the regions between 400 and 500
[113]766 # and 800 and 900 in the unit 'channel'
[1024]767 c)
768 mask only channel 400
[1554]769 msk = scan.create_mask([400])
[102]770 """
[1554]771 row = kwargs.get("row", 0)
[513]772 data = self._getabcissa(row)
[113]773 u = self._getcoordinfo()[0]
[718]774 if rcParams['verbose']:
[113]775 if u == "": u = "channel"
[718]776 msg = "The current mask window unit is %s" % u
[1118]777 i = self._check_ifs()
778 if not i:
[876]779 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]780 asaplog.push(msg)
[102]781 n = self.nchan()
[1295]782 msk = _n_bools(n, False)
[710]783 # test if args is a 'list' or a 'normal *args - UGLY!!!
784
[1118]785 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
786 and args or args[0]
[710]787 for window in ws:
[1554]788 if len(window) == 1:
789 window = [window[0], window[0]]
790 if len(window) == 0 or len(window) > 2:
791 raise ValueError("A window needs to be defined as [start(, end)]")
[1545]792 if window[0] > window[1]:
793 tmp = window[0]
794 window[0] = window[1]
795 window[1] = tmp
[102]796 for i in range(n):
[1024]797 if data[i] >= window[0] and data[i] <= window[1]:
[1295]798 msk[i] = True
[113]799 if kwargs.has_key('invert'):
800 if kwargs.get('invert'):
[1295]801 msk = mask_not(msk)
[102]802 return msk
[710]803
[256]804 def get_restfreqs(self):
805 """
806 Get the restfrequency(s) stored in this scantable.
807 The return value(s) are always of unit 'Hz'
808 Parameters:
809 none
810 Returns:
811 a list of doubles
812 """
813 return list(self._getrestfreqs())
[102]814
[402]815
    def set_restfreqs(self, freqs=None, unit='Hz'):
        """
        Set or replace the restfrequency specified.
        If the 'freqs' argument holds a scalar,
        then that rest frequency will be applied to all the selected
        data. If the 'freqs' argument holds
        a vector, then it MUST be of equal or smaller length than
        the number of IFs (and the available restfrequencies will be
        replaced by this vector). In this case, *all* data have
        the restfrequency set per IF according
        to the corresponding value you give in the 'freqs' vector.
        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
        IF 1 gets restfreq 2e9.
        You can also specify the frequencies via a linecatalog.

        Parameters:
            freqs:   list of rest frequency values or string identifiers
            unit:    unit for rest frequency (default 'Hz')

        Example:
            # set the given restfrequency for the whole table
            scan.set_restfreqs(freqs=1.4e9)
            # If the number of IFs in the data is >= 2 IF0 gets the first
            # value IF1 the second...
            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
            #set the given restfrequency for the whole table (by name)
            scan.set_restfreqs(freqs="OH1667")

        Note:
            To do more sophisticated Restfrequency setting, e.g. on a
            source and IF basis, use scantable.set_selection() before using
            this function.
            # provided your scantable is called scan
            selection = selector()
            selection.set_name("ORION*")
            selection.set_ifs([1])
            scan.set_selection(selection)
            scan.set_restfreqs(freqs=86.6e9)

        """
        varlist = vars()
        from asap import linecatalog
        # simple value: apply to all selected data
        if isinstance(freqs, int) or isinstance(freqs, float):
            self._setrestfreqs(freqs, "",unit)
        # list of values: one per IF
        elif isinstance(freqs, list) or isinstance(freqs, tuple):
            # list values are scalars
            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
                sel = selector()
                # remember the user's selection; it is temporarily
                # replaced by a per-IF selection below and restored after
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i], "",unit)
                self._setselection(savesel)
            # list values are dicts, {'value': ..., 'name': ...}
            elif isinstance(freqs[-1], dict):
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i]["value"],
                                       freqs[i]["name"], "MHz")
                self._setselection(savesel)
        # freqs are to be taken from a linecatalog
        elif isinstance(freqs, linecatalog):
            sel = selector()
            savesel = self._getselection()
            iflist = self.getifnos()
            for i in xrange(freqs.nrow()):
                sel.set_ifs(iflist[i])
                self._setselection(sel)
                self._setrestfreqs(freqs.get_frequency(i),
                                   freqs.get_name(i), "MHz")
                # ensure that we are not iterating past nIF
                if i == self.nif()-1: break
            self._setselection(savesel)
        else:
            return
        self._add_history("set_restfreqs", varlist)
900
[1360]901 def shift_refpix(self, delta):
[1589]902 """
903 Shift the reference pixel of the Spectra Coordinate by an
904 integer amount.
905 Parameters:
906 delta: the amount to shift by
[1360]907 Note:
[1589]908 Be careful using this with broadband data.
[1360]909 """
[1589]910 Scantable.shift(self, delta)
[931]911
[1259]912 def history(self, filename=None):
913 """
914 Print the history. Optionally to a file.
[1348]915 Parameters:
916 filename: The name of the file to save the history to.
[1259]917 """
[484]918 hist = list(self._gethistory())
[794]919 out = "-"*80
[484]920 for h in hist:
[489]921 if h.startswith("---"):
[794]922 out += "\n"+h
[489]923 else:
924 items = h.split("##")
925 date = items[0]
926 func = items[1]
927 items = items[2:]
[794]928 out += "\n"+date+"\n"
929 out += "Function: %s\n Parameters:" % (func)
[489]930 for i in items:
931 s = i.split("=")
[1118]932 out += "\n %s = %s" % (s[0], s[1])
[794]933 out += "\n"+"-"*80
[1259]934 if filename is not None:
935 if filename is "":
936 filename = 'scantable_history.txt'
937 import os
938 filename = os.path.expandvars(os.path.expanduser(filename))
939 if not os.path.isdir(filename):
940 data = open(filename, 'w')
941 data.write(out)
942 data.close()
943 else:
944 msg = "Illegal file name '%s'." % (filename)
945 if rcParams['verbose']:
946 print msg
947 else:
948 raise IOError(msg)
949 if rcParams['verbose']:
950 try:
951 from IPython.genutils import page as pager
952 except ImportError:
953 from pydoc import pager
954 pager(out)
955 else:
956 return out
[484]957 return
[513]958 #
959 # Maths business
960 #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none' (mean no weight)
                      'var' (1/var(spec) weighted)
                      'tsys' (1/Tsys**2 weighted)
                      'tint' (integration time weighted)
                      'tintsys' (Tint/Tsys**2)
                      'median' ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        # normalise arguments to what the C++ layer expects
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        # the averager takes a tuple of scantables
        scan = (self, )
        try:
            if align:
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("average_time", varlist)
        return s
[710]1008
[1589]1009 @print_log_dec
[876]1010 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1011 """
1012 Return a scan where all spectra are converted to either
1013 Jansky or Kelvin depending upon the flux units of the scan table.
1014 By default the function tries to look the values up internally.
1015 If it can't find them (or if you want to over-ride), you must
1016 specify EITHER jyperk OR eta (and D which it will try to look up
1017 also if you don't set it). jyperk takes precedence if you set both.
1018 Parameters:
1019 jyperk: the Jy / K conversion factor
1020 eta: the aperture efficiency
1021 d: the geomtric diameter (metres)
1022 insitu: if False a new scantable is returned.
1023 Otherwise, the scaling is done in-situ
1024 The default is taken from .asaprc (False)
1025 """
1026 if insitu is None: insitu = rcParams['insitu']
[876]1027 self._math._setinsitu(insitu)
[513]1028 varlist = vars()
[1593]1029 jyperk = jyperk or -1.0
1030 d = d or -1.0
1031 eta = eta or -1.0
[876]1032 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1033 s._add_history("convert_flux", varlist)
1034 if insitu: self._assign(s)
1035 else: return s
[513]1036
[1589]1037 @print_log_dec
[876]1038 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1039 """
1040 Return a scan after applying a gain-elevation correction.
1041 The correction can be made via either a polynomial or a
1042 table-based interpolation (and extrapolation if necessary).
1043 You specify polynomial coefficients, an ascii table or neither.
1044 If you specify neither, then a polynomial correction will be made
1045 with built in coefficients known for certain telescopes (an error
1046 will occur if the instrument is not known).
1047 The data and Tsys are *divided* by the scaling factors.
1048 Parameters:
1049 poly: Polynomial coefficients (default None) to compute a
1050 gain-elevation correction as a function of
1051 elevation (in degrees).
1052 filename: The name of an ascii file holding correction factors.
1053 The first row of the ascii file must give the column
1054 names and these MUST include columns
1055 "ELEVATION" (degrees) and "FACTOR" (multiply data
1056 by this) somewhere.
1057 The second row must give the data type of the
1058 column. Use 'R' for Real and 'I' for Integer.
1059 An example file would be
1060 (actual factors are arbitrary) :
1061
1062 TIME ELEVATION FACTOR
1063 R R R
1064 0.1 0 0.8
1065 0.2 20 0.85
1066 0.3 40 0.9
1067 0.4 60 0.85
1068 0.5 80 0.8
1069 0.6 90 0.75
1070 method: Interpolation method when correcting from a table.
1071 Values are "nearest", "linear" (default), "cubic"
1072 and "spline"
1073 insitu: if False a new scantable is returned.
1074 Otherwise, the scaling is done in-situ
1075 The default is taken from .asaprc (False)
1076 """
1077
1078 if insitu is None: insitu = rcParams['insitu']
[876]1079 self._math._setinsitu(insitu)
[513]1080 varlist = vars()
[1593]1081 poly = poly or ()
[513]1082 from os.path import expandvars
1083 filename = expandvars(filename)
[876]1084 s = scantable(self._math._gainel(self, poly, filename, method))
1085 s._add_history("gain_el", varlist)
[1593]1086 if insitu:
1087 self._assign(s)
1088 else:
1089 return s
[710]1090
[1589]1091 @print_log_dec
[931]1092 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1093 """
1094 Return a scan where all rows have been aligned in frequency/velocity.
1095 The alignment frequency frame (e.g. LSRK) is that set by function
1096 set_freqframe.
1097 Parameters:
1098 reftime: reference time to align at. By default, the time of
1099 the first row of data is used.
1100 method: Interpolation method for regridding the spectra.
1101 Choose from "nearest", "linear", "cubic" (default)
1102 and "spline"
1103 insitu: if False a new scantable is returned.
1104 Otherwise, the scaling is done in-situ
1105 The default is taken from .asaprc (False)
1106 """
[931]1107 if insitu is None: insitu = rcParams["insitu"]
[876]1108 self._math._setinsitu(insitu)
[513]1109 varlist = vars()
[1593]1110 reftime = reftime or ""
[931]1111 s = scantable(self._math._freq_align(self, reftime, method))
[876]1112 s._add_history("freq_align", varlist)
1113 if insitu: self._assign(s)
1114 else: return s
[513]1115
[1589]1116 @print_log_dec
[876]1117 def opacity(self, tau, insitu=None):
[513]1118 """
1119 Apply an opacity correction. The data
1120 and Tsys are multiplied by the correction factor.
1121 Parameters:
[1689]1122 tau: (list of) opacity from which the correction factor is
[513]1123 exp(tau*ZD)
[1689]1124 where ZD is the zenith-distance.
1125 If a list is provided, it has to be of length nIF,
1126 nIF*nPol or 1 and in order of IF/POL, e.g.
1127 [opif0pol0, opif0pol1, opif1pol0 ...]
[513]1128 insitu: if False a new scantable is returned.
1129 Otherwise, the scaling is done in-situ
1130 The default is taken from .asaprc (False)
1131 """
1132 if insitu is None: insitu = rcParams['insitu']
[876]1133 self._math._setinsitu(insitu)
[513]1134 varlist = vars()
[1689]1135 if not hasattr(tau, "__len__"):
1136 tau = [tau]
[876]1137 s = scantable(self._math._opacity(self, tau))
1138 s._add_history("opacity", varlist)
1139 if insitu: self._assign(s)
1140 else: return s
[513]1141
[1589]1142 @print_log_dec
[513]1143 def bin(self, width=5, insitu=None):
1144 """
1145 Return a scan where all spectra have been binned up.
[1348]1146 Parameters:
[513]1147 width: The bin width (default=5) in pixels
1148 insitu: if False a new scantable is returned.
1149 Otherwise, the scaling is done in-situ
1150 The default is taken from .asaprc (False)
1151 """
1152 if insitu is None: insitu = rcParams['insitu']
[876]1153 self._math._setinsitu(insitu)
[513]1154 varlist = vars()
[876]1155 s = scantable(self._math._bin(self, width))
[1118]1156 s._add_history("bin", varlist)
[1589]1157 if insitu:
1158 self._assign(s)
1159 else:
1160 return s
[513]1161
[1589]1162 @print_log_dec
[513]1163 def resample(self, width=5, method='cubic', insitu=None):
1164 """
[1348]1165 Return a scan where all spectra have been binned up.
[1573]1166
[1348]1167 Parameters:
[513]1168 width: The bin width (default=5) in pixels
1169 method: Interpolation method when correcting from a table.
1170 Values are "nearest", "linear", "cubic" (default)
1171 and "spline"
1172 insitu: if False a new scantable is returned.
1173 Otherwise, the scaling is done in-situ
1174 The default is taken from .asaprc (False)
1175 """
1176 if insitu is None: insitu = rcParams['insitu']
[876]1177 self._math._setinsitu(insitu)
[513]1178 varlist = vars()
[876]1179 s = scantable(self._math._resample(self, method, width))
[1118]1180 s._add_history("resample", varlist)
[876]1181 if insitu: self._assign(s)
1182 else: return s
[513]1183
[1589]1184 @print_log_dec
[946]1185 def average_pol(self, mask=None, weight='none'):
1186 """
1187 Average the Polarisations together.
1188 Parameters:
1189 mask: An optional mask defining the region, where the
1190 averaging will be applied. The output will have all
1191 specified points masked.
1192 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1193 weighted), or 'tsys' (1/Tsys**2 weighted)
1194 """
1195 varlist = vars()
[1593]1196 mask = mask or ()
[1010]1197 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1198 s._add_history("average_pol", varlist)
[992]1199 return s
[513]1200
[1589]1201 @print_log_dec
[1145]1202 def average_beam(self, mask=None, weight='none'):
1203 """
1204 Average the Beams together.
1205 Parameters:
1206 mask: An optional mask defining the region, where the
1207 averaging will be applied. The output will have all
1208 specified points masked.
1209 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1210 weighted), or 'tsys' (1/Tsys**2 weighted)
1211 """
1212 varlist = vars()
[1593]1213 mask = mask or ()
[1145]1214 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1215 s._add_history("average_beam", varlist)
1216 return s
1217
[1586]1218 def parallactify(self, pflag):
[1617]1219 """
1220 Set a flag to inidcate whether this data should be treated as having
1221 been 'parallactified' (total phase == 0.0)
1222 Parameters:
1223 pflag: Bool inidcating whether to turn this on (True) or
1224 off (False)
1225 """
[1586]1226 varlist = vars()
1227 self._parallactify(pflag)
1228 self._add_history("parallactify", varlist)
1229
    @print_log_dec
    def convert_pol(self, poltype=None):
        """
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.
        Parameters:
            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"
        Returns:
            a new scantable holding the converted data; the original is
            left untouched.
        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # in verbose mode errors are printed rather than raised
            if rcParams['verbose']:
                print msg
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        return s
1250
[1589]1251 @print_log_dec
[1574]1252 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
[513]1253 """
1254 Smooth the spectrum by the specified kernel (conserving flux).
1255 Parameters:
1256 kernel: The type of smoothing kernel. Select from
[1574]1257 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1258 or 'poly'
[513]1259 width: The width of the kernel in pixels. For hanning this is
1260 ignored otherwise it defauls to 5 pixels.
1261 For 'gaussian' it is the Full Width Half
1262 Maximum. For 'boxcar' it is the full width.
[1574]1263 For 'rmedian' and 'poly' it is the half width.
1264 order: Optional parameter for 'poly' kernel (default is 2), to
1265 specify the order of the polnomial. Ignored by all other
1266 kernels.
[513]1267 insitu: if False a new scantable is returned.
1268 Otherwise, the scaling is done in-situ
1269 The default is taken from .asaprc (False)
1270 Example:
1271 none
1272 """
1273 if insitu is None: insitu = rcParams['insitu']
[876]1274 self._math._setinsitu(insitu)
[513]1275 varlist = vars()
[1574]1276 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1277 s._add_history("smooth", varlist)
1278 if insitu: self._assign(s)
1279 else: return s
[513]1280
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            uselin:     use linear polynomial fit
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()
        if mask is None:
            # default: fit over all channels (width taken from the first IF)
            mask = [True for i in xrange(self.nchan(-1))]
        from asap.asapfitter import fitter
        try:
            f = fitter()
            f.set_scan(self, mask)
            #f.set_function(poly=order)
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)
            s = f.auto_fit(insitu, plot=plot)
            s._add_history("poly_baseline", varlist)
            if insitu: self._assign(s)
            else: return s
        except RuntimeError:
            # the fitter does not say why it failed; report the likely cause
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                print msg
                return
            else:
                raise RuntimeError(msg)
[513]1325
[1118]1326 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1327 threshold=3, chan_avg_limit=1, plot=False,
1328 insitu=None):
[880]1329 """
1330 Return a scan which has been baselined (all rows) by a polynomial.
1331 Spectral lines are detected first using linefinder and masked out
1332 to avoid them affecting the baseline solution.
1333
1334 Parameters:
1335 mask: an optional mask retreived from scantable
1336 edge: an optional number of channel to drop at
1337 the edge of spectrum. If only one value is
1338 specified, the same number will be dropped from
1339 both sides of the spectrum. Default is to keep
[907]1340 all channels. Nested tuples represent individual
[976]1341 edge selection for different IFs (a number of spectral
1342 channels can be different)
[880]1343 order: the order of the polynomial (default is 0)
1344 threshold: the threshold used by line finder. It is better to
1345 keep it large as only strong lines affect the
1346 baseline solution.
[1280]1347 chan_avg_limit:
1348 a maximum number of consequtive spectral channels to
1349 average during the search of weak and broad lines.
1350 The default is no averaging (and no search for weak
1351 lines). If such lines can affect the fitted baseline
1352 (e.g. a high order polynomial is fitted), increase this
1353 parameter (usually values up to 8 are reasonable). Most
1354 users of this method should find the default value
1355 sufficient.
[1061]1356 plot: plot the fit and the residual. In this each
1357 indivual fit has to be approved, by typing 'y'
1358 or 'n'
[880]1359 insitu: if False a new scantable is returned.
1360 Otherwise, the scaling is done in-situ
1361 The default is taken from .asaprc (False)
1362
1363 Example:
1364 scan2=scan.auto_poly_baseline(order=7)
1365 """
1366 if insitu is None: insitu = rcParams['insitu']
1367 varlist = vars()
1368 from asap.asapfitter import fitter
1369 from asap.asaplinefind import linefinder
1370 from asap import _is_sequence_or_number as _is_valid
1371
[976]1372 # check whether edge is set up for each IF individually
[1118]1373 individualedge = False;
1374 if len(edge) > 1:
1375 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1376 individualedge = True;
[907]1377
[1118]1378 if not _is_valid(edge, int) and not individualedge:
[909]1379 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1380 pair of integers specified as a tuple. Nested tuples are allowed \
1381 to make individual selection for different IFs."
[919]1382
[1118]1383 curedge = (0, 0)
1384 if individualedge:
1385 for edgepar in edge:
1386 if not _is_valid(edgepar, int):
1387 raise ValueError, "Each element of the 'edge' tuple has \
1388 to be a pair of integers or an integer."
[907]1389 else:
[1118]1390 curedge = edge;
[880]1391
1392 # setup fitter
1393 f = fitter()
1394 f.set_function(poly=order)
1395
1396 # setup line finder
[1118]1397 fl = linefinder()
[1268]1398 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1399
1400 if not insitu:
[1118]1401 workscan = self.copy()
[880]1402 else:
[1118]1403 workscan = self
[880]1404
[907]1405 fl.set_scan(workscan)
1406
[1118]1407 rows = range(workscan.nrow())
[880]1408 asaplog.push("Processing:")
1409 for r in rows:
[1118]1410 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1411 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1412 workscan.getpol(r), workscan.getcycle(r))
[880]1413 asaplog.push(msg, False)
[907]1414
[976]1415 # figure out edge parameter
[1118]1416 if individualedge:
1417 if len(edge) >= workscan.getif(r):
1418 raise RuntimeError, "Number of edge elements appear to " \
1419 "be less than the number of IFs"
1420 curedge = edge[workscan.getif(r)]
[919]1421
[976]1422 # setup line finder
[1118]1423 fl.find_lines(r, mask, curedge)
[1536]1424 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1425 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1426 f.fit()
1427 x = f.get_parameters()
[1061]1428 if plot:
1429 f.plot(residual=True)
1430 x = raw_input("Accept fit ( [y]/n ): ")
1431 if x.upper() == 'N':
1432 continue
[880]1433 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1434 if plot:
1435 f._p.unmap()
1436 f._p = None
1437 workscan._add_history("auto_poly_baseline", varlist)
[880]1438 if insitu:
1439 self._assign(workscan)
1440 else:
1441 return workscan
1442
[1589]1443 @print_log_dec
[914]1444 def rotate_linpolphase(self, angle):
1445 """
1446 Rotate the phase of the complex polarization O=Q+iU correlation.
1447 This is always done in situ in the raw data. So if you call this
1448 function more than once then each call rotates the phase further.
1449 Parameters:
1450 angle: The angle (degrees) to rotate (add) by.
1451 Examples:
1452 scan.rotate_linpolphase(2.3)
1453 """
1454 varlist = vars()
[936]1455 self._math._rotate_linpolphase(self, angle)
[914]1456 self._add_history("rotate_linpolphase", varlist)
1457 return
[710]1458
[1589]1459 @print_log_dec
[914]1460 def rotate_xyphase(self, angle):
1461 """
1462 Rotate the phase of the XY correlation. This is always done in situ
1463 in the data. So if you call this function more than once
1464 then each call rotates the phase further.
1465 Parameters:
1466 angle: The angle (degrees) to rotate (add) by.
1467 Examples:
1468 scan.rotate_xyphase(2.3)
1469 """
1470 varlist = vars()
[936]1471 self._math._rotate_xyphase(self, angle)
[914]1472 self._add_history("rotate_xyphase", varlist)
1473 return
1474
[1589]1475 @print_log_dec
[914]1476 def swap_linears(self):
1477 """
[1573]1478 Swap the linear polarisations XX and YY, or better the first two
[1348]1479 polarisations as this also works for ciculars.
[914]1480 """
1481 varlist = vars()
[936]1482 self._math._swap_linears(self)
[914]1483 self._add_history("swap_linears", varlist)
1484 return
1485
[1589]1486 @print_log_dec
[914]1487 def invert_phase(self):
1488 """
1489 Invert the phase of the complex polarisation
1490 """
1491 varlist = vars()
[936]1492 self._math._invert_phase(self)
[914]1493 self._add_history("invert_phase", varlist)
1494 return
1495
[1589]1496 @print_log_dec
[876]1497 def add(self, offset, insitu=None):
[513]1498 """
1499 Return a scan where all spectra have the offset added
1500 Parameters:
1501 offset: the offset
1502 insitu: if False a new scantable is returned.
1503 Otherwise, the scaling is done in-situ
1504 The default is taken from .asaprc (False)
1505 """
1506 if insitu is None: insitu = rcParams['insitu']
[876]1507 self._math._setinsitu(insitu)
[513]1508 varlist = vars()
[876]1509 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1510 s._add_history("add", varlist)
[876]1511 if insitu:
1512 self._assign(s)
1513 else:
[513]1514 return s
1515
[1589]1516 @print_log_dec
[1308]1517 def scale(self, factor, tsys=True, insitu=None):
[513]1518 """
1519 Return a scan where all spectra are scaled by the give 'factor'
1520 Parameters:
1521 factor: the scaling factor
1522 insitu: if False a new scantable is returned.
1523 Otherwise, the scaling is done in-situ
1524 The default is taken from .asaprc (False)
1525 tsys: if True (default) then apply the operation to Tsys
1526 as well as the data
1527 """
1528 if insitu is None: insitu = rcParams['insitu']
[876]1529 self._math._setinsitu(insitu)
[513]1530 varlist = vars()
[876]1531 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1532 s._add_history("scale", varlist)
[876]1533 if insitu:
1534 self._assign(s)
1535 else:
[513]1536 return s
1537
[1504]1538 def set_sourcetype(self, match, matchtype="pattern",
1539 sourcetype="reference"):
[1502]1540 """
1541 Set the type of the source to be an source or reference scan
1542 using the provided pattern:
1543 Parameters:
[1504]1544 match: a Unix style pattern, regular expression or selector
1545 matchtype: 'pattern' (default) UNIX style pattern or
1546 'regex' regular expression
[1502]1547 sourcetype: the type of the source to use (source/reference)
1548 """
1549 varlist = vars()
1550 basesel = self.get_selection()
1551 stype = -1
1552 if sourcetype.lower().startswith("r"):
1553 stype = 1
1554 elif sourcetype.lower().startswith("s"):
1555 stype = 0
[1504]1556 else:
[1502]1557 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1558 if matchtype.lower().startswith("p"):
1559 matchtype = "pattern"
1560 elif matchtype.lower().startswith("r"):
1561 matchtype = "regex"
1562 else:
1563 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1564 sel = selector()
1565 if isinstance(match, selector):
1566 sel = match
1567 else:
[1504]1568 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1569 self.set_selection(basesel+sel)
1570 self._setsourcetype(stype)
1571 self.set_selection(basesel)
[1573]1572 self._add_history("set_sourcetype", varlist)
[1502]1573
    @print_log_dec
    def auto_quotient(self, preserve=True, mode='paired'):
        """
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"
        Parameters:
            preserve:       you can preserve (default) the continuum or
                            remove it. The equations used are
                            preserve: Output = Toff * (on/off) - Toff
                            remove:   Output = Toff * (on/off) - Ton
            mode:           the on/off detection mode
                            'paired' (default)
                            identifies 'off' scans by the
                            trailing '_R' (Mopra/Parkes) or
                            '_e'/'_w' (Tid) and matches
                            on/off pairs from the observing pattern
                            'time'
                            finds the closest off in time

        """
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        varlist = vars()
        s = None
        if mode.lower() == "paired":
            # select the reference rows (SRCTYPE==1) and the signal rows
            # (SRCTYPE==0) in turn by re-using one selector; the original
            # selection is restored afterwards
            basesel = self.get_selection()
            sel = selector()+basesel
            sel.set_query("SRCTYPE==1")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==0")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
            self.set_selection(basesel)
        elif mode.lower() == "time":
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]1616
[1589]1617 @print_log_dec
[1145]1618 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1619 """
[1143]1620 Form a quotient using "off" beams when observing in "MX" mode.
1621 Parameters:
[1145]1622 mask: an optional mask to be used when weight == 'stddev'
[1143]1623 weight: How to average the off beams. Default is 'median'.
[1145]1624 preserve: you can preserve (default) the continuum or
1625 remove it. The equations used are
1626 preserve: Output = Toff * (on/off) - Toff
1627 remove: Output = Toff * (on/off) - Ton
[1217]1628 """
[1593]1629 mask = mask or ()
[1141]1630 varlist = vars()
1631 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1632 preoff = scantable(self._math._mx_extract(self, 'off'))
1633 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1634 from asapmath import quotient
[1145]1635 q = quotient(on, off, preserve)
[1143]1636 q._add_history("mx_quotient", varlist)
[1217]1637 return q
[513]1638
[1589]1639 @print_log_dec
[718]1640 def freq_switch(self, insitu=None):
1641 """
1642 Apply frequency switching to the data.
1643 Parameters:
1644 insitu: if False a new scantable is returned.
1645 Otherwise, the swictching is done in-situ
1646 The default is taken from .asaprc (False)
1647 Example:
1648 none
1649 """
1650 if insitu is None: insitu = rcParams['insitu']
[876]1651 self._math._setinsitu(insitu)
[718]1652 varlist = vars()
[876]1653 s = scantable(self._math._freqswitch(self))
[1118]1654 s._add_history("freq_switch", varlist)
[876]1655 if insitu: self._assign(s)
1656 else: return s
[718]1657
[1589]1658 @print_log_dec
[780]1659 def recalc_azel(self):
1660 """
1661 Recalculate the azimuth and elevation for each position.
1662 Parameters:
1663 none
1664 Example:
1665 """
1666 varlist = vars()
[876]1667 self._recalcazel()
[780]1668 self._add_history("recalc_azel", varlist)
1669 return
1670
[1589]1671 @print_log_dec
[513]1672 def __add__(self, other):
1673 varlist = vars()
1674 s = None
1675 if isinstance(other, scantable):
[1573]1676 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1677 elif isinstance(other, float):
[876]1678 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1679 else:
[718]1680 raise TypeError("Other input is not a scantable or float value")
[513]1681 s._add_history("operator +", varlist)
1682 return s
1683
[1589]1684 @print_log_dec
[513]1685 def __sub__(self, other):
1686 """
1687 implicit on all axes and on Tsys
1688 """
1689 varlist = vars()
1690 s = None
1691 if isinstance(other, scantable):
[1588]1692 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1693 elif isinstance(other, float):
[876]1694 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1695 else:
[718]1696 raise TypeError("Other input is not a scantable or float value")
[513]1697 s._add_history("operator -", varlist)
1698 return s
[710]1699
[1589]1700 @print_log_dec
[513]1701 def __mul__(self, other):
1702 """
1703 implicit on all axes and on Tsys
1704 """
1705 varlist = vars()
1706 s = None
1707 if isinstance(other, scantable):
[1588]1708 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1709 elif isinstance(other, float):
[876]1710 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1711 else:
[718]1712 raise TypeError("Other input is not a scantable or float value")
[513]1713 s._add_history("operator *", varlist)
1714 return s
1715
[710]1716
[1589]1717 @print_log_dec
[513]1718 def __div__(self, other):
1719 """
1720 implicit on all axes and on Tsys
1721 """
1722 varlist = vars()
1723 s = None
1724 if isinstance(other, scantable):
[1589]1725 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1726 elif isinstance(other, float):
1727 if other == 0.0:
[718]1728 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1729 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1730 else:
[718]1731 raise TypeError("Other input is not a scantable or float value")
[513]1732 s._add_history("operator /", varlist)
1733 return s
1734
[530]1735 def get_fit(self, row=0):
1736 """
1737 Print or return the stored fits for a row in the scantable
1738 Parameters:
1739 row: the row which the fit has been applied to.
1740 """
1741 if row > self.nrow():
1742 return
[976]1743 from asap.asapfit import asapfit
[530]1744 fit = asapfit(self._getfit(row))
[718]1745 if rcParams['verbose']:
[530]1746 print fit
1747 return
1748 else:
1749 return fit.as_dict()
1750
[1483]1751 def flag_nans(self):
1752 """
1753 Utility function to flag NaN values in the scantable.
1754 """
1755 import numpy
1756 basesel = self.get_selection()
1757 for i in range(self.nrow()):
[1589]1758 sel = self.get_row_selector(i)
1759 self.set_selection(basesel+sel)
[1483]1760 nans = numpy.isnan(self._getspectrum(0))
1761 if numpy.any(nans):
1762 bnans = [ bool(v) for v in nans]
1763 self.flag(bnans)
1764 self.set_selection(basesel)
1765
[1588]1766 def get_row_selector(self, rowno):
1767 return selector(beams=self.getbeam(rowno),
1768 ifs=self.getif(rowno),
1769 pols=self.getpol(rowno),
1770 scans=self.getscan(rowno),
1771 cycles=self.getcycle(rowno))
[1573]1772
[484]1773 def _add_history(self, funcname, parameters):
[1435]1774 if not rcParams['scantable.history']:
1775 return
[484]1776 # create date
1777 sep = "##"
1778 from datetime import datetime
1779 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1780 hist = dstr+sep
1781 hist += funcname+sep#cdate+sep
1782 if parameters.has_key('self'): del parameters['self']
[1118]1783 for k, v in parameters.iteritems():
[484]1784 if type(v) is dict:
[1118]1785 for k2, v2 in v.iteritems():
[484]1786 hist += k2
1787 hist += "="
[1118]1788 if isinstance(v2, scantable):
[484]1789 hist += 'scantable'
1790 elif k2 == 'mask':
[1118]1791 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1792 hist += str(self._zip_mask(v2))
1793 else:
1794 hist += str(v2)
[484]1795 else:
[513]1796 hist += str(v2)
[484]1797 else:
1798 hist += k
1799 hist += "="
[1118]1800 if isinstance(v, scantable):
[484]1801 hist += 'scantable'
1802 elif k == 'mask':
[1118]1803 if isinstance(v, list) or isinstance(v, tuple):
[513]1804 hist += str(self._zip_mask(v))
1805 else:
1806 hist += str(v)
[484]1807 else:
1808 hist += str(v)
1809 hist += sep
1810 hist = hist[:-2] # remove trailing '##'
1811 self._addhistory(hist)
1812
[710]1813
[484]1814 def _zip_mask(self, mask):
1815 mask = list(mask)
1816 i = 0
1817 segments = []
1818 while mask[i:].count(1):
1819 i += mask[i:].index(1)
1820 if mask[i:].count(0):
1821 j = i + mask[i:].index(0)
1822 else:
[710]1823 j = len(mask)
[1118]1824 segments.append([i, j])
[710]1825 i = j
[484]1826 return segments
[714]1827
[626]1828 def _get_ordinate_label(self):
1829 fu = "("+self.get_fluxunit()+")"
1830 import re
1831 lbl = "Intensity"
[1118]1832 if re.match(".K.", fu):
[626]1833 lbl = "Brightness Temperature "+ fu
[1118]1834 elif re.match(".Jy.", fu):
[626]1835 lbl = "Flux density "+ fu
1836 return lbl
[710]1837
[876]1838 def _check_ifs(self):
1839 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1840 nchans = filter(lambda t: t > 0, nchans)
[876]1841 return (sum(nchans)/len(nchans) == nchans[0])
[976]1842
    def _fill(self, names, unit, average):
        """
        Fill this scantable from a list of input files via the C++
        stfiller, merging them one after the other; the result replaces
        the contents of this instance.
        Parameters:
            names:    list of file names; env vars and '~' are expanded
            unit:     brightness unit to set on the result, or None
            average:  if True, average integrations within each scan
        """
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        # validate and expand all names before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    asaplog.push(msg)
                    print asaplog.pop().strip()
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage backend flag: 0 == memory, 1 == disk
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            rx = rcParams['scantable.reference']
            r._setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, -1, -1)
            r._read()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge with what has been accumulated so far
                tbl = self._math._merge([self, tbl])
            # rebind this object to the (possibly merged) table
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        self.set_freqframe(rcParams['scantable.freqframe'])
1883
[1402]1884 def __getitem__(self, key):
1885 if key < 0:
1886 key += self.nrow()
1887 if key >= self.nrow():
1888 raise IndexError("Row index out of range.")
1889 return self._getspectrum(key)
1890
1891 def __setitem__(self, key, value):
1892 if key < 0:
1893 key += self.nrow()
1894 if key >= self.nrow():
1895 raise IndexError("Row index out of range.")
1896 if not hasattr(value, "__len__") or \
1897 len(value) > self.nchan(self.getif(key)):
1898 raise ValueError("Spectrum length doesn't match.")
1899 return self._setspectrum(value, key)
1900
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
1903
    def __iter__(self):
        """Iterate over the rows, yielding one spectrum per row."""
        for i in range(len(self)):
            yield self[i]
Note: See TracBrowser for help on using the repository browser.