source: branches/alma/python/scantable.py@ 1449

Last change on this file since 1449 was 1446, checked in by TakTsutsumi, 16 years ago

Merged recent updates (since 2007) from nrao-asap

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 72.4 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
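 Example (an illustrative sketch; the file name is an assumption,
 substitute your own rpfits/sdfits/ms file or asap table):
 from asap import scantable
 s = scantable('myobservation.rpf', average=True, unit='K')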
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s' is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[1259]74 self._add_history("scantable", varlist)
[714]75 print_log()
[102]76
[876]77 def save(self, name=None, format=None, overwrite=False):
[116]78 """
[1280]79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
[116]81 Parameters:
[1093]82 name: the name of the outputfile. For format "ASCII"
83 this is the root file name (data in 'name'.txt
[497]84 and header in 'name'_header.txt)
[116]85 format: an optional file format. Default is ASAP.
[280]86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]87 'SDFITS' (save as SDFITS file)
[200]88 'ASCII' (saves as ascii text file)
[226]89 'MS2' (saves as an aips++
90 MeasurementSet V2)
[411]91 overwrite: If the file should be overwritten if it exists.
[256]92 The default False is to return with warning
[411]93 without writing the output. USE WITH CARE.
[116]94 Example:
95 scan.save('myscan.asap')
[1118]96 scan.save('myscan.sdfits', 'SDFITS')
[116]97 """
[411]98 from os import path
[226]99 if format is None: format = rcParams['scantable.save']
[256]100 suffix = '.'+format.lower()
[1118]101 if name is None or name == "":
[256]102 name = 'scantable'+suffix
[718]103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
[411]105 name = path.expandvars(name)
[256]106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
[718]108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
[451]114 format2 = format.upper()
115 if format2 == 'ASAP':
[116]116 self._save(name)
117 else:
[989]118 from asap._asap import stwriter as stw
[1118]119 writer = stw(format2)
120 writer.write(self, name)
[718]121 print_log()
[116]122 return
123
[102]124 def copy(self):
125 """
126 Return a copy of this scantable.
[1348]127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]129 Parameters:
[113]130 none
[102]131 Example:
132 copiedscan = scan.copy()
133 """
[876]134 sd = scantable(Scantable._copy(self))
[113]135 return sd
136
[1093]137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
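 Example (illustrative; the scan numbers are assumptions):
 # remove scans 2 and 5 from a scantable called scan
 cleaned = scan.drop_scan([2, 5])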
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
[1118]157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
[1093]159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
[1118]173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
[1093]177
178
[102]179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
[1348]183 Note:
184 See scantable.drop_scan() for the inverse operation.
[102]185 Parameters:
[513]186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
 188 '*_R' gets all 'ref' scans
[102]189 Example:
[513]190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
 194 # get a subset of scans by scanno (as listed in scan.summary())
[1118]195 newscan = scan.get_scan([0, 2, 7, 10])
[102]196 """
197 if scanid is None:
[718]198 if rcParams['verbose']:
[1118]199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
[718]201 return
202 else:
203 raise RuntimeError("No scan given")
204
[102]205 try:
[946]206 bsel = self.get_selection()
207 sel = selector()
[102]208 if type(scanid) is str:
[946]209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
[876]211 scopy = self._copy()
[946]212 self.set_selection(bsel)
[876]213 return scantable(scopy)
[102]214 elif type(scanid) is int:
[946]215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
[876]217 scopy = self._copy()
[946]218 self.set_selection(bsel)
[876]219 return scantable(scopy)
[381]220 elif type(scanid) is list:
[946]221 sel.set_scans(scanid)
 222 self.set_selection(bsel+sel)
[876]223 scopy = self._copy()
[946]224 self.set_selection(bsel)
[876]225 return scantable(scopy)
[381]226 else:
[718]227 msg = "Illegal scanid type, use 'int' or 'list' of ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
[102]232 except RuntimeError:
[718]233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
[102]235
236 def __str__(self):
[1118]237 return Scantable._summary(self, True)
[102]238
[976]239 def summary(self, filename=None):
[102]240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
 243 filename: the name of a file to write the output to
244 Default - no file output
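 Example (illustrative; the output file name is an assumption):
 scan.summary() # print/page the summary
 scan.summary('obs_summary.txt') # write the same info to a file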
[102]247 """
[976]248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]250 if filename is not None:
[256]251 if filename == "":
252 filename = 'scantable_summary.txt'
[415]253 from os.path import expandvars, isdir
[411]254 filename = expandvars(filename)
[415]255 if not isdir(filename):
[413]256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
[718]260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
[794]266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
[718]271 else:
272 return info
[710]273
[946]274
275 def get_selection(self):
276 """
[1005]277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
[946]284 """
285 return selector(self._getselection())
286
[1005]287 def set_selection(self, selection=selector()):
[946]288 """
[1005]289 Select a subset of the data. All following operations on this scantable
 290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
[1118]295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]296 scan.set_selection(sel) # set the selection
 297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
[946]299 """
300 self._setselection(selection)
301
[1446]302 def get_row(self, row=0, insitu=None):
303 """
304 Select a row in the scantable.
305 Return a scantable with single row.
306 Parameters:
307 row: row no of integration, default is 0.
308 insitu: if False a new scantable is returned.
309 Otherwise, the scaling is done in-situ
310 The default is taken from .asaprc (False)
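 Example (illustrative sketch):
 # extract the first integration as its own scantable
 row0 = scan.get_row(0, insitu=False)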
311 """
312 if insitu is None: insitu = rcParams['insitu']
313 if not insitu:
314 workscan = self.copy()
315 else:
316 workscan = self
317 # Select a row
318 sel=selector()
319 sel.set_scans([workscan.getscan(row)])
320 sel.set_cycles([workscan.getcycle(row)])
321 sel.set_beams([workscan.getbeam(row)])
322 sel.set_ifs([workscan.getif(row)])
323 sel.set_polarisations([workscan.getpol(row)])
324 sel.set_name(workscan._getsourcename(row))
325 workscan.set_selection(sel)
326 if not workscan.nrow() == 1:
327 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
328 raise RuntimeError(msg)
329 del sel
330 if insitu:
331 self._assign(workscan)
332 else:
333 return workscan
334
[876]335 def stats(self, stat='stddev', mask=None):
[102]336 """
[135]337 Determine the specified statistic of the current beam/if/pol
[102]338 Takes a 'mask' as an optional parameter to specify which
339 channels should be excluded.
340 Parameters:
[135]341 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
342 'var', 'stddev', 'avdev', 'rms', 'median'
343 mask: an optional mask specifying where the statistic
[102]344 should be determined.
345 Example:
[113]346 scan.set_unit('channel')
[1118]347 msk = scan.create_mask([100, 200], [500, 600])
[135]348 scan.stats(stat='mean', mask=msk)
[102]349 """
 350 if mask is None:
[876]351 mask = []
[1118]352 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]353 if not self._check_ifs():
[1118]354 raise ValueError("Cannot apply mask as the IFs have different "
355 "number of channels. Please use setselection() "
356 "to select individual IFs")
[256]357
[876]358 statvals = self._math._stats(self, mask, stat)
359 out = ''
360 axes = []
361 for i in range(self.nrow()):
362 axis = []
363 axis.append(self.getscan(i))
364 axis.append(self.getbeam(i))
365 axis.append(self.getif(i))
366 axis.append(self.getpol(i))
367 axis.append(self.getcycle(i))
368 axes.append(axis)
369 tm = self._gettime(i)
370 src = self._getsourcename(i)
371 out += 'Scan[%d] (%s) ' % (axis[0], src)
372 out += 'Time[%s]:\n' % (tm)
373 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
374 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
375 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
376 out += '= %3.3f\n' % (statvals[i])
377 out += "--------------------------------------------------\n"
[256]378
[876]379 if rcParams['verbose']:
380 print "--------------------------------------------------"
[1118]381 print " ", stat
[876]382 print "--------------------------------------------------"
383 print out
[1295]384 #else:
385 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
386 # 'axes' : axes,
387 # 'data': statvals}
388 return statvals
[102]389
[1118]390 def stddev(self, mask=None):
[135]391 """
392 Determine the standard deviation of the current beam/if/pol
393 Takes a 'mask' as an optional parameter to specify which
394 channels should be excluded.
395 Parameters:
396 mask: an optional mask specifying where the standard
397 deviation should be determined.
398
399 Example:
400 scan.set_unit('channel')
[1118]401 msk = scan.create_mask([100, 200], [500, 600])
[135]402 scan.stddev(mask=msk)
403 """
[1118]404 return self.stats(stat='stddev', mask=mask);
[135]405
[1003]406
[1259]407 def get_column_names(self):
[1003]408 """
409 Return a list of column names, which can be used for selection.
410 """
[1259]411 return list(Scantable.get_column_names(self))
[1003]412
[876]413 def get_tsys(self):
[113]414 """
415 Return the System temperatures.
416 Returns:
[876]417 a list of Tsys values for the current selection
[113]418 """
[256]419
[876]420 return self._row_callback(self._gettsys, "Tsys")
[256]421
[876]422 def _row_callback(self, callback, label):
423 axes = []
[1118]424 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]425 out = ""
[1118]426 outvec = []
[876]427 for i in range(self.nrow()):
428 axis = []
429 axis.append(self.getscan(i))
430 axis.append(self.getbeam(i))
431 axis.append(self.getif(i))
432 axis.append(self.getpol(i))
433 axis.append(self.getcycle(i))
434 axes.append(axis)
435 tm = self._gettime(i)
436 src = self._getsourcename(i)
437 out += 'Scan[%d] (%s) ' % (axis[0], src)
438 out += 'Time[%s]:\n' % (tm)
439 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
440 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
441 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
442 outvec.append(callback(i))
443 out += '= %3.3f\n' % (outvec[i])
444 out += "--------------------------------------------------\n"
445 if rcParams['verbose']:
446 print "--------------------------------------------------"
447 print " %s" % (label)
448 print "--------------------------------------------------"
449 print out
[1175]450 # disabled because the vector seems more useful
451 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
452 return outvec
[256]453
[1070]454 def _get_column(self, callback, row=-1):
455 """
456 """
457 if row == -1:
458 return [callback(i) for i in range(self.nrow())]
459 else:
460 if 0 <= row < self.nrow():
461 return callback(row)
[256]462
[1070]463
[1348]464 def get_time(self, row=-1, asdatetime=False):
[113]465 """
466 Get a list of time stamps for the observations.
[1348]467 Return a datetime object for each integration time stamp in the scantable.
[113]468 Parameters:
[1348]469 row: row no of integration. Default -1 return all rows
470 asdatetime: return values as datetime objects rather than strings
[113]471 Example:
472 none
473 """
[1175]474 from time import strptime
475 from datetime import datetime
[1348]476 if not asdatetime:
477 return self._get_column(self._gettime, row)
 times = self._get_column(self._gettime, row)
[1175]478 format = "%Y/%m/%d/%H:%M:%S"
479 if isinstance(times, list):
480 return [datetime(*strptime(i, format)[:6]) for i in times]
481 else:
482 return datetime(*strptime(times, format)[:6])
[102]483
[1348]484
485 def get_inttime(self, row=-1):
486 """
487 Get a list of integration times for the observations.
488 Return a time in seconds for each integration in the scantable.
489 Parameters:
490 row: row no of integration. Default -1 return all rows.
491 Example:
492 none
493 """
494 return self._get_column(self._getinttime, row)
495
496
[714]497 def get_sourcename(self, row=-1):
498 """
[794]499 Get a list of source names for the observations.
[714]500 Return a string for each integration in the scantable.
501 Parameters:
[1348]502 row: row no of integration. Default -1 return all rows.
[714]503 Example:
504 none
505 """
[1070]506 return self._get_column(self._getsourcename, row)
[714]507
[794]508 def get_elevation(self, row=-1):
509 """
510 Get a list of elevations for the observations.
511 Return a float for each integration in the scantable.
512 Parameters:
[1348]513 row: row no of integration. Default -1 return all rows.
[794]514 Example:
515 none
516 """
[1070]517 return self._get_column(self._getelevation, row)
[794]518
519 def get_azimuth(self, row=-1):
520 """
521 Get a list of azimuths for the observations.
522 Return a float for each integration in the scantable.
523 Parameters:
[1348]524 row: row no of integration. Default -1 return all rows.
[794]525 Example:
526 none
527 """
[1070]528 return self._get_column(self._getazimuth, row)
[794]529
530 def get_parangle(self, row=-1):
531 """
532 Get a list of parallactic angles for the observations.
533 Return a float for each integration in the scantable.
534 Parameters:
[1348]535 row: row no of integration. Default -1 return all rows.
[794]536 Example:
537 none
538 """
[1070]539 return self._get_column(self._getparangle, row)
[794]540
[1070]541 def get_direction(self, row=-1):
542 """
543 Get a list of Positions on the sky (direction) for the observations.
 544 Return the direction for each integration in the scantable.
545 Parameters:
546 row: row no of integration. Default -1 return all rows
547 Example:
548 none
549 """
550 return self._get_column(self._getdirection, row)
551
[1389]552 def get_directionval(self, row=-1):
553 """
554 Get a list of Positions on the sky (direction) for the observations.
 555 Return the direction value for each integration in the scantable.
556 Parameters:
557 row: row no of integration. Default -1 return all rows
558 Example:
559 none
560 """
561 return self._get_column(self._getdirectionvec, row)
562
[102]563 def set_unit(self, unit='channel'):
564 """
565 Set the unit for all following operations on this scantable
566 Parameters:
567 unit: optional unit, default is 'channel'
[1118]568 one of '*Hz', 'km/s', 'channel', ''
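 Example (illustrative):
 scan.set_unit('km/s') # work in velocity
 scan.set_unit('GHz') # or in frequency
 scan.set_unit() # back to channels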
[102]569 """
[484]570 varlist = vars()
[1118]571 if unit in ['', 'pixel', 'channel']:
[113]572 unit = ''
573 inf = list(self._getcoordinfo())
574 inf[0] = unit
575 self._setcoordinfo(inf)
[1118]576 self._add_history("set_unit", varlist)
[113]577
[484]578 def set_instrument(self, instr):
[358]579 """
[1348]580 Set the instrument for subsequent processing.
[358]581 Parameters:
[710]582 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]583 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]584 """
585 self._setInstrument(instr)
[1118]586 self._add_history("set_instrument", vars())
[718]587 print_log()
[358]588
[1190]589 def set_feedtype(self, feedtype):
590 """
591 Overwrite the feed type, which might not be set correctly.
592 Parameters:
593 feedtype: 'linear' or 'circular'
594 """
595 self._setfeedtype(feedtype)
596 self._add_history("set_feedtype", vars())
597 print_log()
598
[276]599 def set_doppler(self, doppler='RADIO'):
600 """
601 Set the doppler for all following operations on this scantable.
602 Parameters:
603 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
604 """
[484]605 varlist = vars()
[276]606 inf = list(self._getcoordinfo())
607 inf[2] = doppler
608 self._setcoordinfo(inf)
[1118]609 self._add_history("set_doppler", vars())
[718]610 print_log()
[710]611
[226]612 def set_freqframe(self, frame=None):
[113]613 """
614 Set the frame type of the Spectral Axis.
615 Parameters:
[591]616 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]617 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
618 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]619 Examples:
620 scan.set_freqframe('BARY')
621 """
[484]622 if frame is None: frame = rcParams['scantable.freqframe']
623 varlist = vars()
[1118]624 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
625 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]626
[989]627 if frame in valid:
[113]628 inf = list(self._getcoordinfo())
629 inf[1] = frame
630 self._setcoordinfo(inf)
[1118]631 self._add_history("set_freqframe", varlist)
[102]632 else:
[1118]633 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid, )
[718]634 if rcParams['verbose']:
635 print msg
636 else:
637 raise TypeError(msg)
638 print_log()
[710]639
[989]640 def set_dirframe(self, frame=""):
641 """
642 Set the frame type of the Direction on the sky.
643 Parameters:
644 frame: an optional frame type, default ''. Valid frames are:
645 'J2000', 'B1950', 'GALACTIC'
646 Examples:
647 scan.set_dirframe('GALACTIC')
648 """
649 varlist = vars()
650 try:
651 Scantable.set_dirframe(self, frame)
[1118]652 except RuntimeError, msg:
[989]653 if rcParams['verbose']:
654 print msg
655 else:
656 raise
[1118]657 self._add_history("set_dirframe", varlist)
[989]658
[113]659 def get_unit(self):
660 """
661 Get the default unit set in this scantable
662 Returns:
663 A unit string
664 """
665 inf = self._getcoordinfo()
666 unit = inf[0]
667 if unit == '': unit = 'channel'
668 return unit
[102]669
[158]670 def get_abcissa(self, rowno=0):
[102]671 """
[158]672 Get the abcissa in the current coordinate setup for the currently
[113]673 selected Beam/IF/Pol
674 Parameters:
[226]675 rowno: an optional row number in the scantable. Default is the
676 first row, i.e. rowno=0
[113]677 Returns:
[1348]678 The abcissa values and the format string (as a tuple)
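 Example (illustrative):
 abc, lbl = scan.get_abcissa(rowno=0)
 print lbl # the unit/format string, e.g. for plot labelling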
[113]679 """
[256]680 abc = self._getabcissa(rowno)
[710]681 lbl = self._getabcissalabel(rowno)
[718]682 print_log()
[158]683 return abc, lbl
[113]684
[1401]685 def flag(self, mask=None, unflag=False):
[1001]686 """
687 Flag the selected data using an optional channel mask.
688 Parameters:
689 mask: an optional channel mask, created with create_mask. Default
690 (no mask) is all channels.
[1401]691 unflag: if True, unflag the data
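 Example (illustrative sketch; the channel ranges are assumptions):
 msk = scan.create_mask([0, 100], [900, 1023])
 scan.flag(mask=msk) # flag using a channel mask (see create_mask)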
[1001]692 """
693 varlist = vars()
[1118]694 if mask is None:
695 mask = []
[1001]696 try:
[1401]697 self._flag(mask, unflag)
[1118]698 except RuntimeError, msg:
[1001]699 if rcParams['verbose']:
700 print msg
701 return
702 else: raise
703 self._add_history("flag", varlist)
704
[1203]705 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]706 """
707 Flag the data in 'lag' space by providing a frequency to remove.
708 Flagged data in the scantable gets set to 0.0 before the fft.
709 No taper is applied.
710 Parameters:
[1348]711 frequency: the frequency (really a period within the bandwidth)
712 to remove
713 width: the width of the frequency to remove, to remove a
 714 range of frequencies around the centre.
[1203]715 unit: the frequency unit (default "GHz")
 insitu: if False a new scantable is returned.
 Otherwise, the flagging is done in-situ
 The default is taken from .asaprc (False)
716 Notes:
[1348]717 It is recommended to flag edges of the band or strong
718 signals beforehand.
[1192]719 """
720 if insitu is None: insitu = rcParams['insitu']
721 self._math._setinsitu(insitu)
722 varlist = vars()
[1370]723 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]724 if not base.has_key(unit):
725 raise ValueError("%s is not a valid unit." % unit)
726 try:
[1200]727 s = scantable(self._math._lag_flag(self, frequency*base[unit],
728 width*base[unit]))
[1192]729 except RuntimeError, msg:
730 if rcParams['verbose']:
731 print msg
732 return
733 else: raise
734 s._add_history("lag_flag", varlist)
735 print_log()
736 if insitu:
737 self._assign(s)
738 else:
739 return s
[1001]740
[1192]741
[113]742 def create_mask(self, *args, **kwargs):
743 """
[1118]744 Compute and return a mask based on [min, max] windows.
[189]745 The specified windows are to be INCLUDED, when the mask is
[113]746 applied.
[102]747 Parameters:
[1118]748 [min, max], [min2, max2], ...
[1024]749 Pairs of start/end points (inclusive)specifying the regions
[102]750 to be masked
[189]751 invert: optional argument. If specified as True,
752 return an inverted mask, i.e. the regions
753 specified are EXCLUDED
[513]754 row: create the mask using the specified row for
755 unit conversions, default is row=0
756 only necessary if frequency varies over rows.
[102]757 Example:
[113]758 scan.set_unit('channel')
759 a)
[1118]760 msk = scan.create_mask([400, 500], [800, 900])
[189]761 # masks everything outside 400 and 500
[113]762 # and 800 and 900 in the unit 'channel'
763
764 b)
[1118]765 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]766 # masks the regions between 400 and 500
[113]767 # and 800 and 900 in the unit 'channel'
[1024]768 c)
769 mask only channel 400
[1118]770 msk = scan.create_mask([400, 400])
[102]771 """
[513]772 row = 0
773 if kwargs.has_key("row"):
774 row = kwargs.get("row")
775 data = self._getabcissa(row)
[113]776 u = self._getcoordinfo()[0]
[718]777 if rcParams['verbose']:
[113]778 if u == "": u = "channel"
[718]779 msg = "The current mask window unit is %s" % u
[1118]780 i = self._check_ifs()
781 if not i:
[876]782 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]783 asaplog.push(msg)
[102]784 n = self.nchan()
[1295]785 msk = _n_bools(n, False)
[710]786 # test if args is a 'list' or a 'normal *args - UGLY!!!
787
[1118]788 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
789 and args or args[0]
[710]790 for window in ws:
[102]791 if (len(window) != 2 or window[0] > window[1] ):
[1118]792 raise TypeError("A window needs to be defined as [min, max]")
[102]793 for i in range(n):
[1024]794 if data[i] >= window[0] and data[i] <= window[1]:
[1295]795 msk[i] = True
[113]796 if kwargs.has_key('invert'):
797 if kwargs.get('invert'):
[1295]798 msk = mask_not(msk)
[718]799 print_log()
[102]800 return msk
[710]801
[1446]802 def get_masklist(self, mask=None, row=0):
[256]803 """
[1446]804 Compute and return a list of mask windows, [min, max].
805 Parameters:
806 mask: channel mask, created with create_mask.
 807 row: calculate the masklist using the specified row
808 for unit conversions, default is row=0
809 only necessary if frequency varies over rows.
810 Returns:
811 [min, max], [min2, max2], ...
 812 Pairs of start/end points (inclusive) specifying
813 the masked regions
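 Example (illustrative):
 msk = scan.create_mask([400, 500], [800, 900])
 print scan.get_masklist(msk) # pairs of window edges in the current unit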
814 """
815 if not (isinstance(mask,list) or isinstance(mask, tuple)):
816 raise TypeError("The mask should be list or tuple.")
817 if len(mask) < 2:
 818 raise TypeError("The mask should contain at least 2 elements")
819 if self.nchan() != len(mask):
820 msg = "Number of channels in scantable != number of mask elements"
821 raise TypeError(msg)
822 data = self._getabcissa(row)
823 u = self._getcoordinfo()[0]
824 if rcParams['verbose']:
825 if u == "": u = "channel"
826 msg = "The current mask window unit is %s" % u
827 i = self._check_ifs()
828 if not i:
829 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
830 asaplog.push(msg)
831 masklist=[]
832 ist, ien = None, None
833 ist, ien=self.get_mask_indices(mask)
834 if ist is not None and ien is not None:
835 for i in xrange(len(ist)):
 836 msrange = [data[ist[i]], data[ien[i]]]
 837 msrange.sort()
 838 masklist.append([msrange[0], msrange[1]])
839 return masklist
840
841 def get_mask_indices(self, mask=None):
842 """
843 Compute and Return lists of mask start indices and mask end indices.
844 Parameters:
845 mask: channel mask, created with create_mask.
846 Returns:
847 List of mask start indices and that of mask end indices,
848 i.e., [istart1,istart2,....], [iend1,iend2,....].
849 """
850 if not (isinstance(mask,list) or isinstance(mask, tuple)):
851 raise TypeError("The mask should be list or tuple.")
852 if len(mask) < 2:
 853 raise TypeError("The mask should contain at least 2 elements")
854 istart=[]
855 iend=[]
856 if mask[0]: istart.append(0)
857 for i in range(len(mask)-1):
858 if not mask[i] and mask[i+1]:
859 istart.append(i+1)
860 elif mask[i] and not mask[i+1]:
861 iend.append(i)
862 if mask[len(mask)-1]: iend.append(len(mask)-1)
863 if len(istart) != len(iend):
864 raise RuntimeError("Numbers of mask start != mask end.")
865 for i in range(len(istart)):
866 if istart[i] > iend[i]:
867 raise RuntimeError("Mask start index > mask end index")
868 break
869 return istart,iend
870
871# def get_restfreqs(self):
872# """
873# Get the restfrequency(s) stored in this scantable.
874# The return value(s) are always of unit 'Hz'
875# Parameters:
876# none
877# Returns:
878# a list of doubles
879# """
880# return list(self._getrestfreqs())
881
882 def get_restfreqs(self, ids=None):
883 """
[256]884 Get the restfrequency(s) stored in this scantable.
885 The return value(s) are always of unit 'Hz'
886 Parameters:
[1446]887 ids: (optional) a list of MOLECULE_ID values for which the
 888 rest frequency(s) are to be retrieved
[256]889 Returns:
[1446]890 dictionary containing ids and a list of doubles for each id
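 Example (illustrative):
 rfs = scan.get_restfreqs() # {MOLECULE_ID: [rest frequencies in Hz], ...}
 print rfs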
[256]891 """
[1446]892 if ids is None:
893 rfreqs={}
894 idlist = self.getmolnos()
895 for i in idlist:
896 rfreqs[i]=list(self._getrestfreqs(i))
897 return rfreqs
898 else:
899 if type(ids)==list or type(ids)==tuple:
900 rfreqs={}
901 for i in ids:
902 rfreqs[i]=list(self._getrestfreqs(i))
903 return rfreqs
904 else:
905 return list(self._getrestfreqs(ids))
906 #return list(self._getrestfreqs(ids))
[102]907
[931]908 def set_restfreqs(self, freqs=None, unit='Hz'):
909 """
[1446]910 ********NEED TO BE UPDATED begin************
[931]911 Set or replace the rest frequency/frequencies.
 912 If the 'freqs' argument holds a scalar,
913 then that rest frequency will be applied to all the selected
914 data. If the 'freqs' argument holds
915 a vector, then it MUST be of equal or smaller length than
916 the number of IFs (and the available restfrequencies will be
917 replaced by this vector). In this case, *all* data have
918 the restfrequency set per IF according
919 to the corresponding value you give in the 'freqs' vector.
[1118]920 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]921 IF 1 gets restfreq 2e9.
[1446]922 ********NEED TO BE UPDATED end************
[1153]923 You can also specify the frequencies via a linecatalog/
924
[931]925 Parameters:
 926 freqs: list of rest frequency values or string identifiers
927 unit: unit for rest frequency (default 'Hz')
[402]928
[931]929 Example:
[1446]930 # set the given restfrequency for all currently selected IFs
[931]931 scan.set_restfreqs(freqs=1.4e9)
[1446]932 # set multiple restfrequencies to all the selected data
933 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
934 # If the number of IFs in the data is >= 2 the IF0 gets the first
935 # value IF1 the second... NOTE that freqs needs to be
936 # specified in list of list (e.g. [[],[],...] ).
937 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]938 #set the given restfrequency for the whole table (by name)
939 scan.set_restfreqs(freqs="OH1667")
[391]940
[931]941 Note:
 942 To do more sophisticated rest frequency setting, e.g. on a
943 source and IF basis, use scantable.set_selection() before using
944 this function.
 945 # provided your scantable is called scan
946 selection = selector()
947 selection.set_name("ORION*")
948 selection.set_ifs([1])
949 scan.set_selection(selection)
950 scan.set_restfreqs(freqs=86.6e9)
951
952 """
953 varlist = vars()
[1157]954 from asap import linecatalog
955 # simple value
[1118]956 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]957 # TT mod
958 #self._setrestfreqs(freqs, "",unit)
959 self._setrestfreqs([freqs], [""],unit)
[1157]960 # list of values
[1118]961 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]962 # list values are scalars
[1118]963 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]964 self._setrestfreqs(freqs, [""],unit)
 965 # list values are dicts with 'value' and 'name' keys
966 elif isinstance(freqs[-1], dict):
[1446]967 #sel = selector()
968 #savesel = self._getselection()
969 #iflist = self.getifnos()
970 #for i in xrange(len(freqs)):
971 # sel.set_ifs(iflist[i])
972 # self._setselection(sel)
973 # self._setrestfreqs(freqs[i], "",unit)
974 #self._setselection(savesel)
975 self._setrestfreqs(freqs["value"],
976 freqs["name"], "MHz")
977 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]978 sel = selector()
979 savesel = self._getselection()
[1322]980 iflist = self.getifnos()
[1446]981 if len(freqs)>len(iflist):
982 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]983 for i in xrange(len(freqs)):
[1322]984 sel.set_ifs(iflist[i])
[1259]985 self._setselection(sel)
[1157]986 self._setrestfreqs(freqs[i]["value"],
987 freqs[i]["name"], "MHz")
988 self._setselection(savesel)
989 # freqs are to be taken from a linecatalog
[1153]990 elif isinstance(freqs, linecatalog):
991 sel = selector()
992 savesel = self._getselection()
 iflist = self.getifnos()
[1153]993 for i in xrange(freqs.nrow()):
[1322]994 sel.set_ifs(iflist[i])
[1153]995 self._setselection(sel)
996 self._setrestfreqs(freqs.get_frequency(i),
997 freqs.get_name(i), "MHz")
998 # ensure that we are not iterating past nIF
999 if i == self.nif()-1: break
1000 self._setselection(savesel)
[931]1001 else:
1002 return
1003 self._add_history("set_restfreqs", varlist)
1004
[1360]1005 def shift_refpix(self, delta):
1006 """
 1007 Shift the reference pixel of the Spectral Coordinate by an
1008 integer amount.
1009 Parameters:
1010 delta: the amount to shift by
1011 Note:
1012 Be careful using this with broadband data.
1013 """
1014 Scantable.shift(self, delta)
[931]1015
[1259]1016 def history(self, filename=None):
1017 """
1018 Print the history. Optionally to a file.
[1348]1019 Parameters:
1020 filename: The name of the file to save the history to.
[1259]1021 """
[484]1022 hist = list(self._gethistory())
[794]1023 out = "-"*80
[484]1024 for h in hist:
[489]1025 if h.startswith("---"):
[794]1026 out += "\n"+h
[489]1027 else:
1028 items = h.split("##")
1029 date = items[0]
1030 func = items[1]
1031 items = items[2:]
[794]1032 out += "\n"+date+"\n"
1033 out += "Function: %s\n Parameters:" % (func)
[489]1034 for i in items:
1035 s = i.split("=")
[1118]1036 out += "\n %s = %s" % (s[0], s[1])
[794]1037 out += "\n"+"-"*80
[1259]1038 if filename is not None:
 1039 if filename == "":
1040 filename = 'scantable_history.txt'
1041 import os
1042 filename = os.path.expandvars(os.path.expanduser(filename))
1043 if not os.path.isdir(filename):
1044 data = open(filename, 'w')
1045 data.write(out)
1046 data.close()
1047 else:
1048 msg = "Illegal file name '%s'." % (filename)
1049 if rcParams['verbose']:
1050 print msg
1051 else:
1052 raise IOError(msg)
1053 if rcParams['verbose']:
1054 try:
1055 from IPython.genutils import page as pager
1056 except ImportError:
1057 from pydoc import pager
1058 pager(out)
1059 else:
1060 return out
[484]1061 return
[513]1062 #
1063 # Maths business
1064 #
1065
[931]1066 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1067 """
[1070]1068 Return the (time) weighted average of a scan.
[513]1069 Note:
[1070]1070 in channels only - align if necessary
[513]1071 Parameters:
1072 mask: an optional mask (only used for 'var' and 'tsys'
1073 weighting)
[558]1074 scanav: True averages each scan separately
1075 False (default) averages all scans together,
[1099]1076 weight: Weighting scheme.
1077 'none' (mean no weight)
1078 'var' (1/var(spec) weighted)
1079 'tsys' (1/Tsys**2 weighted)
1080 'tint' (integration time weighted)
1081 'tintsys' (Tint/Tsys**2)
1082 'median' ( median averaging)
[535]1083 The default is 'tint'
[931]1084 align: align the spectra in velocity before averaging. It takes
1085 the time of the first spectrum as reference time.
[513]1086 Example:
1087 # time average the scantable without using a mask
[710]1088 newscan = scan.average_time()
[513]1089 """
1090 varlist = vars()
[976]1091 if weight is None: weight = 'TINT'
[513]1092 if mask is None: mask = ()
[1099]1093 if scanav: scanav = "SCAN"
1094 else: scanav = "NONE"
[1118]1095 scan = (self, )
[989]1096 try:
[1118]1097 if align:
1098 scan = (self.freq_align(insitu=False), )
1099 s = None
1100 if weight.upper() == 'MEDIAN':
1101 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1102 scanav))
1103 else:
1104 s = scantable(self._math._average(scan, mask, weight.upper(),
1105 scanav))
1106 except RuntimeError, msg:
[989]1107 if rcParams['verbose']:
1108 print msg
1109 return
1110 else: raise
[1099]1111 s._add_history("average_time", varlist)
[718]1112 print_log()
[513]1113 return s
[710]1114
[876]1115 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1116 """
1117 Return a scan where all spectra are converted to either
1118 Jansky or Kelvin depending upon the flux units of the scan table.
1119 By default the function tries to look the values up internally.
1120 If it can't find them (or if you want to over-ride), you must
1121 specify EITHER jyperk OR eta (and D which it will try to look up
1122 also if you don't set it). jyperk takes precedence if you set both.
1123 Parameters:
1124 jyperk: the Jy / K conversion factor
1125 eta: the aperture efficiency
 1126 d: the geometric diameter (metres)
1127 insitu: if False a new scantable is returned.
1128 Otherwise, the scaling is done in-situ
1129 The default is taken from .asaprc (False)
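 Example (illustrative sketch; the Jy/K factor is an assumption,
 use the value appropriate for your telescope):
 scan_jy = scan.convert_flux(jyperk=12.7, insitu=False)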
1130 """
1131 if insitu is None: insitu = rcParams['insitu']
[876]1132 self._math._setinsitu(insitu)
[513]1133 varlist = vars()
1134 if jyperk is None: jyperk = -1.0
1135 if d is None: d = -1.0
1136 if eta is None: eta = -1.0
[876]1137 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1138 s._add_history("convert_flux", varlist)
1139 print_log()
1140 if insitu: self._assign(s)
1141 else: return s
[513]1142
[876]1143 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1144 """
1145 Return a scan after applying a gain-elevation correction.
1146 The correction can be made via either a polynomial or a
1147 table-based interpolation (and extrapolation if necessary).
1148 You specify polynomial coefficients, an ascii table or neither.
1149 If you specify neither, then a polynomial correction will be made
1150 with built in coefficients known for certain telescopes (an error
1151 will occur if the instrument is not known).
1152 The data and Tsys are *divided* by the scaling factors.
1153 Parameters:
1154 poly: Polynomial coefficients (default None) to compute a
1155 gain-elevation correction as a function of
1156 elevation (in degrees).
1157 filename: The name of an ascii file holding correction factors.
1158 The first row of the ascii file must give the column
1159 names and these MUST include columns
1160 "ELEVATION" (degrees) and "FACTOR" (multiply data
1161 by this) somewhere.
1162 The second row must give the data type of the
1163 column. Use 'R' for Real and 'I' for Integer.
1164 An example file would be
1165 (actual factors are arbitrary) :
1166
1167 TIME ELEVATION FACTOR
1168 R R R
1169 0.1 0 0.8
1170 0.2 20 0.85
1171 0.3 40 0.9
1172 0.4 60 0.85
1173 0.5 80 0.8
1174 0.6 90 0.75
1175 method: Interpolation method when correcting from a table.
1176 Values are "nearest", "linear" (default), "cubic"
1177 and "spline"
1178 insitu: if False a new scantable is returned.
1179 Otherwise, the scaling is done in-situ
1180 The default is taken from .asaprc (False)
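 Example (illustrative sketch; 'gainel.txt' is an assumed ascii file
 laid out as described above):
 corrected = scan.gain_el(filename='gainel.txt', method='linear',
 insitu=False)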
1181 """
1182
1183 if insitu is None: insitu = rcParams['insitu']
[876]1184 self._math._setinsitu(insitu)
[513]1185 varlist = vars()
1186 if poly is None:
[1118]1187 poly = ()
[513]1188 from os.path import expandvars
1189 filename = expandvars(filename)
[876]1190 s = scantable(self._math._gainel(self, poly, filename, method))
1191 s._add_history("gain_el", varlist)
1192 print_log()
1193 if insitu: self._assign(s)
1194 else: return s
[710]1195
[931]1196 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1197 """
1198 Return a scan where all rows have been aligned in frequency/velocity.
1199 The alignment frequency frame (e.g. LSRK) is that set by function
1200 set_freqframe.
1201 Parameters:
1202 reftime: reference time to align at. By default, the time of
1203 the first row of data is used.
1204 method: Interpolation method for regridding the spectra.
1205 Choose from "nearest", "linear", "cubic" (default)
1206 and "spline"
1207 insitu: if False a new scantable is returned.
1208 Otherwise, the scaling is done in-situ
1209 The default is taken from .asaprc (False)
1210 """
[931]1211 if insitu is None: insitu = rcParams["insitu"]
[876]1212 self._math._setinsitu(insitu)
[513]1213 varlist = vars()
[931]1214 if reftime is None: reftime = ""
1215 s = scantable(self._math._freq_align(self, reftime, method))
[876]1216 s._add_history("freq_align", varlist)
1217 print_log()
1218 if insitu: self._assign(s)
1219 else: return s
[513]1220
[876]1221 def opacity(self, tau, insitu=None):
[513]1222 """
1223 Apply an opacity correction. The data
1224 and Tsys are multiplied by the correction factor.
1225 Parameters:
1226 tau: Opacity from which the correction factor is
1227 exp(tau*ZD)
1228 where ZD is the zenith-distance
1229 insitu: if False a new scantable is returned.
1230 Otherwise, the scaling is done in-situ
1231 The default is taken from .asaprc (False)
1232 """
1233 if insitu is None: insitu = rcParams['insitu']
[876]1234 self._math._setinsitu(insitu)
[513]1235 varlist = vars()
[876]1236 s = scantable(self._math._opacity(self, tau))
1237 s._add_history("opacity", varlist)
1238 print_log()
1239 if insitu: self._assign(s)
1240 else: return s
[513]1241
1242 def bin(self, width=5, insitu=None):
1243 """
1244 Return a scan where all spectra have been binned up.
[1348]1245 Parameters:
[513]1246 width: The bin width (default=5) in pixels
1247 insitu: if False a new scantable is returned.
1248 Otherwise, the scaling is done in-situ
1249 The default is taken from .asaprc (False)
1250 """
1251 if insitu is None: insitu = rcParams['insitu']
[876]1252 self._math._setinsitu(insitu)
[513]1253 varlist = vars()
[876]1254 s = scantable(self._math._bin(self, width))
[1118]1255 s._add_history("bin", varlist)
[876]1256 print_log()
1257 if insitu: self._assign(s)
1258 else: return s
[513]1259
[710]1260
[513]1261 def resample(self, width=5, method='cubic', insitu=None):
1262 """
[1348]1263 Return a scan where all spectra have been resampled.
1264
1265 Parameters:
[513]1266 width: The bin width (default=5) in pixels
1267 method: Interpolation method when correcting from a table.
1268 Values are "nearest", "linear", "cubic" (default)
1269 and "spline"
1270 insitu: if False a new scantable is returned.
1271 Otherwise, the scaling is done in-situ
1272 The default is taken from .asaprc (False)
1273 """
1274 if insitu is None: insitu = rcParams['insitu']
[876]1275 self._math._setinsitu(insitu)
[513]1276 varlist = vars()
[876]1277 s = scantable(self._math._resample(self, method, width))
[1118]1278 s._add_history("resample", varlist)
[876]1279 print_log()
1280 if insitu: self._assign(s)
1281 else: return s
[513]1282
1283
[946]1284 def average_pol(self, mask=None, weight='none'):
1285 """
1286 Average the Polarisations together.
1287 Parameters:
1288 mask: An optional mask defining the region, where the
1289 averaging will be applied. The output will have all
1290 specified points masked.
1291 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1292 weighted), or 'tsys' (1/Tsys**2 weighted)
1293 """
1294 varlist = vars()
1295 if mask is None:
1296 mask = ()
[1010]1297 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1298 s._add_history("average_pol", varlist)
[946]1299 print_log()
[992]1300 return s
[513]1301
[1145]1302 def average_beam(self, mask=None, weight='none'):
1303 """
1304 Average the Beams together.
1305 Parameters:
1306 mask: An optional mask defining the region, where the
1307 averaging will be applied. The output will have all
1308 specified points masked.
1309 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1310 weighted), or 'tsys' (1/Tsys**2 weighted)
1311 """
1312 varlist = vars()
1313 if mask is None:
1314 mask = ()
1315 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1316 s._add_history("average_beam", varlist)
1317 print_log()
1318 return s
1319
[992]1320 def convert_pol(self, poltype=None):
1321 """
1322 Convert the data to a different polarisation type.
1323 Parameters:
1324 poltype: The new polarisation type. Valid types are:
1325 "linear", "stokes" and "circular"
1326 """
1327 varlist = vars()
1328 try:
1329 s = scantable(self._math._convertpol(self, poltype))
[1118]1330 except RuntimeError, msg:
[992]1331 if rcParams['verbose']:
[1118]1332 print msg
1333 return
[992]1334 else:
1335 raise
[1118]1336 s._add_history("convert_pol", varlist)
[992]1337 print_log()
1338 return s
1339
[876]1340 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1341 """
1342 Smooth the spectrum by the specified kernel (conserving flux).
1343 Parameters:
1344 kernel: The type of smoothing kernel. Select from
[1373]1345 'hanning' (default), 'gaussian', 'boxcar' and
1346 'rmedian'
[513]1347 width: The width of the kernel in pixels. For hanning this is
 1348 ignored, otherwise it defaults to 5 pixels.
1349 For 'gaussian' it is the Full Width Half
1350 Maximum. For 'boxcar' it is the full width.
[1373]1351 For 'rmedian' it is the half width.
[513]1352 insitu: if False a new scantable is returned.
1353 Otherwise, the scaling is done in-situ
1354 The default is taken from .asaprc (False)
1355 Example:
1356 none
1357 """
1358 if insitu is None: insitu = rcParams['insitu']
[876]1359 self._math._setinsitu(insitu)
[513]1360 varlist = vars()
[1118]1361 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1362 s._add_history("smooth", varlist)
1363 print_log()
1364 if insitu: self._assign(s)
1365 else: return s
[513]1366
[876]1367
[1389]1368 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1369 """
1370 Return a scan which has been baselined (all rows) by a polynomial.
1371 Parameters:
[794]1372 mask: an optional mask
1373 order: the order of the polynomial (default is 0)
[1061]1374 plot: plot the fit and the residual. In this each
 1375 individual fit has to be approved by typing 'y'
1376 or 'n'
[1389]1377 uselin: use linear polynomial fit
[794]1378 insitu: if False a new scantable is returned.
1379 Otherwise, the scaling is done in-situ
1380 The default is taken from .asaprc (False)
[513]1381 Example:
1382 # return a scan baselined by a third order polynomial,
1383 # not using a mask
1384 bscan = scan.poly_baseline(order=3)
[579]1385 """
[513]1386 if insitu is None: insitu = rcParams['insitu']
1387 varlist = vars()
1388 if mask is None:
[1295]1389 mask = [True for i in xrange(self.nchan(-1))]
[513]1390 from asap.asapfitter import fitter
[1217]1391 try:
1392 f = fitter()
1393 f.set_scan(self, mask)
[1389]1394 if uselin:
1395 f.set_function(lpoly=order)
1396 else:
1397 f.set_function(poly=order)
[1217]1398 s = f.auto_fit(insitu, plot=plot)
[1446]1399 # Save parameters of baseline fits as a class attribute.
1400 # NOTICE: It does not reflect changes in scantable!
1401 self.blpars = f.blpars
[1217]1402 s._add_history("poly_baseline", varlist)
1403 print_log()
1404 if insitu: self._assign(s)
1405 else: return s
1406 except RuntimeError:
1407 msg = "The fit failed, possibly because it didn't converge."
1408 if rcParams['verbose']:
1409 print msg
1410 return
1411 else:
1412 raise RuntimeError(msg)
[513]1413
[1217]1414
[1118]1415 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1416 threshold=3, chan_avg_limit=1, plot=False,
1417 insitu=None):
[880]1418 """
1419 Return a scan which has been baselined (all rows) by a polynomial.
1420 Spectral lines are detected first using linefinder and masked out
1421 to avoid them affecting the baseline solution.
1422
1423 Parameters:
1424 mask: an optional mask retreived from scantable
 1425 edge: an optional number of channels to drop at
 1426 the edge of the spectrum. If only one value is
1427 specified, the same number will be dropped from
1428 both sides of the spectrum. Default is to keep
[907]1429 all channels. Nested tuples represent individual
[976]1430 edge selection for different IFs (the number of spectral
 1431 channels can differ between IFs)
[880]1432 order: the order of the polynomial (default is 0)
1433 threshold: the threshold used by line finder. It is better to
1434 keep it large as only strong lines affect the
1435 baseline solution.
[1280]1436 chan_avg_limit:
 1437 a maximum number of consecutive spectral channels to
1438 average during the search of weak and broad lines.
1439 The default is no averaging (and no search for weak
1440 lines). If such lines can affect the fitted baseline
1441 (e.g. a high order polynomial is fitted), increase this
1442 parameter (usually values up to 8 are reasonable). Most
1443 users of this method should find the default value
1444 sufficient.
[1061]1445 plot: plot the fit and the residual. In this each
 1446 individual fit has to be approved by typing 'y'
1447 or 'n'
[880]1448 insitu: if False a new scantable is returned.
1449 Otherwise, the scaling is done in-situ
1450 The default is taken from .asaprc (False)
1451
1452 Example:
1453 scan2=scan.auto_poly_baseline(order=7)
1454 """
1455 if insitu is None: insitu = rcParams['insitu']
1456 varlist = vars()
1457 from asap.asapfitter import fitter
1458 from asap.asaplinefind import linefinder
1459 from asap import _is_sequence_or_number as _is_valid
1460
[976]1461 # check whether edge is set up for each IF individually
[1118]1462 individualedge = False;
1463 if len(edge) > 1:
1464 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1465 individualedge = True;
[907]1466
[1118]1467 if not _is_valid(edge, int) and not individualedge:
[909]1468 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1469 pair of integers specified as a tuple. Nested tuples are allowed \
1470 to make individual selection for different IFs."
[919]1471
[1118]1472 curedge = (0, 0)
1473 if individualedge:
1474 for edgepar in edge:
1475 if not _is_valid(edgepar, int):
1476 raise ValueError, "Each element of the 'edge' tuple has \
1477 to be a pair of integers or an integer."
[907]1478 else:
[1118]1479 curedge = edge;
[880]1480
1481 # setup fitter
1482 f = fitter()
1483 f.set_function(poly=order)
1484
1485 # setup line finder
[1118]1486 fl = linefinder()
[1268]1487 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1488
1489 if not insitu:
[1118]1490 workscan = self.copy()
[880]1491 else:
[1118]1492 workscan = self
[880]1493
[907]1494 fl.set_scan(workscan)
1495
[1118]1496 rows = range(workscan.nrow())
[1446]1497 # Save parameters of baseline fits & masklists as a class attribute.
1498 # NOTICE: It does not reflect changes in scantable!
1499 if len(rows) > 0:
1500 self.blpars=[]
1501 self.masklists=[]
[880]1502 asaplog.push("Processing:")
1503 for r in rows:
[1118]1504 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1505 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1506 workscan.getpol(r), workscan.getcycle(r))
[880]1507 asaplog.push(msg, False)
[907]1508
[976]1509 # figure out edge parameter
[1118]1510 if individualedge:
 1511 if len(edge) <= workscan.getif(r):
1512 raise RuntimeError, "Number of edge elements appear to " \
1513 "be less than the number of IFs"
1514 curedge = edge[workscan.getif(r)]
[919]1515
[976]1516 # setup line finder
[1118]1517 fl.find_lines(r, mask, curedge)
[1446]1518 outmask=fl.get_mask()
[880]1519 f.set_scan(workscan, fl.get_mask())
1520 f.x = workscan._getabcissa(r)
1521 f.y = workscan._getspectrum(r)
1522 f.data = None
1523 f.fit()
[1446]1524
1525 # Show mask list
1526 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1527 msg = "mask range: "+str(masklist)
1528 asaplog.push(msg, False)
1529
1530 fpar = f.get_parameters()
[1061]1531 if plot:
1532 f.plot(residual=True)
1533 x = raw_input("Accept fit ( [y]/n ): ")
1534 if x.upper() == 'N':
[1446]1535 self.blpars.append(None)
1536 self.masklists.append(None)
[1061]1537 continue
[880]1538 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1539 self.blpars.append(fpar)
1540 self.masklists.append(masklist)
[1061]1541 if plot:
1542 f._p.unmap()
1543 f._p = None
1544 workscan._add_history("auto_poly_baseline", varlist)
[880]1545 if insitu:
1546 self._assign(workscan)
1547 else:
1548 return workscan
1549
[914]1550 def rotate_linpolphase(self, angle):
1551 """
1552 Rotate the phase of the complex polarization O=Q+iU correlation.
1553 This is always done in situ in the raw data. So if you call this
1554 function more than once then each call rotates the phase further.
1555 Parameters:
1556 angle: The angle (degrees) to rotate (add) by.
1557 Examples:
1558 scan.rotate_linpolphase(2.3)
1559 """
1560 varlist = vars()
[936]1561 self._math._rotate_linpolphase(self, angle)
[914]1562 self._add_history("rotate_linpolphase", varlist)
1563 print_log()
1564 return
[710]1565
[513]1566
[914]1567 def rotate_xyphase(self, angle):
1568 """
1569 Rotate the phase of the XY correlation. This is always done in situ
1570 in the data. So if you call this function more than once
1571 then each call rotates the phase further.
1572 Parameters:
1573 angle: The angle (degrees) to rotate (add) by.
1574 Examples:
1575 scan.rotate_xyphase(2.3)
1576 """
1577 varlist = vars()
[936]1578 self._math._rotate_xyphase(self, angle)
[914]1579 self._add_history("rotate_xyphase", varlist)
1580 print_log()
1581 return
1582
1583 def swap_linears(self):
1584 """
[1348]1585 Swap the linear polarisations XX and YY, or better the first two
 1586 polarisations as this also works for circulars.
[914]1587 """
1588 varlist = vars()
[936]1589 self._math._swap_linears(self)
[914]1590 self._add_history("swap_linears", varlist)
1591 print_log()
1592 return
1593
1594 def invert_phase(self):
1595 """
1596 Invert the phase of the complex polarisation
1597 """
1598 varlist = vars()
[936]1599 self._math._invert_phase(self)
[914]1600 self._add_history("invert_phase", varlist)
1601 print_log()
1602 return
1603
[876]1604 def add(self, offset, insitu=None):
[513]1605 """
1606 Return a scan where all spectra have the offset added
1607 Parameters:
1608 offset: the offset
1609 insitu: if False a new scantable is returned.
1610 Otherwise, the scaling is done in-situ
1611 The default is taken from .asaprc (False)
1612 """
1613 if insitu is None: insitu = rcParams['insitu']
[876]1614 self._math._setinsitu(insitu)
[513]1615 varlist = vars()
[876]1616 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1617 s._add_history("add", varlist)
[876]1618 print_log()
1619 if insitu:
1620 self._assign(s)
1621 else:
[513]1622 return s
1623
[1308]1624 def scale(self, factor, tsys=True, insitu=None):
[513]1625 """
 1626 Return a scan where all spectra are scaled by the given 'factor'
1627 Parameters:
1628 factor: the scaling factor
1629 insitu: if False a new scantable is returned.
1630 Otherwise, the scaling is done in-situ
1631 The default is taken from .asaprc (False)
1632 tsys: if True (default) then apply the operation to Tsys
1633 as well as the data
1634 """
1635 if insitu is None: insitu = rcParams['insitu']
[876]1636 self._math._setinsitu(insitu)
[513]1637 varlist = vars()
[876]1638 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1639 s._add_history("scale", varlist)
[876]1640 print_log()
1641 if insitu:
1642 self._assign(s)
1643 else:
[513]1644 return s
1645
[1348]1646 def auto_quotient(self, preserve=True, mode='paired'):
[670]1647 """
 1648 This function allows you to build quotients automatically.
 1649 It assumes the observation to have the same number of
 1650 "ons" and "offs"
1651 Parameters:
[710]1652 preserve: you can preserve (default) the continuum or
1653 remove it. The equations used are
[670]1654 preserve: Output = Toff * (on/off) - Toff
[1070]1655 remove: Output = Toff * (on/off) - Ton
[1348]1656 mode: the on/off detection mode
1657 'paired' (default)
1658 identifies 'off' scans by the
1659 trailing '_R' (Mopra/Parkes) or
1660 '_e'/'_w' (Tid) and matches
1661 on/off pairs from the observing pattern
1662 'time'
1663 finds the closest off in time
1664
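 Example (illustrative sketch, assuming matching on/off scans):
 q = scan.auto_quotient() # paired mode, continuum preserved
 q2 = scan.auto_quotient(preserve=False, mode='time')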
[670]1665 """
[1348]1666 modes = ["time", "paired"]
[670]1667 if not mode in modes:
[876]1668 msg = "please provide valid mode. Valid modes are %s" % (modes)
1669 raise ValueError(msg)
1670 varlist = vars()
[1348]1671 s = None
1672 if mode.lower() == "paired":
1673 basesel = self.get_selection()
[1356]1674 sel = selector()+basesel
1675 sel.set_query("SRCTYPE==1")
1676 self.set_selection(sel)
[1348]1677 offs = self.copy()
1678 sel.set_query("SRCTYPE==0")
[1356]1679 self.set_selection(sel)
[1348]1680 ons = self.copy()
1681 s = scantable(self._math._quotient(ons, offs, preserve))
1682 self.set_selection(basesel)
1683 elif mode.lower() == "time":
1684 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1685 s._add_history("auto_quotient", varlist)
[876]1686 print_log()
1687 return s
[710]1688
[1145]1689 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1690 """
[1143]1691 Form a quotient using "off" beams when observing in "MX" mode.
1692 Parameters:
[1145]1693 mask: an optional mask to be used when weight == 'stddev'
[1143]1694 weight: How to average the off beams. Default is 'median'.
[1145]1695 preserve: you can preserve (default) the continuum or
1696 remove it. The equations used are
1697 preserve: Output = Toff * (on/off) - Toff
1698 remove: Output = Toff * (on/off) - Ton
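Example (minimal sketch; assumes an MX-mode observation):
q = scan.mx_quotient(weight='median', preserve=True)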
[1217]1699 """
[1143]1700 if mask is None: mask = ()
[1141]1701 varlist = vars()
1702 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1703 preoff = scantable(self._math._mx_extract(self, 'off'))
1704 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1705 from asapmath import quotient
[1145]1706 q = quotient(on, off, preserve)
[1143]1707 q._add_history("mx_quotient", varlist)
[1145]1708 print_log()
[1217]1709 return q
[513]1710
[718]1711 def freq_switch(self, insitu=None):
1712 """
1713 Apply frequency switching to the data.
1714 Parameters:
1715 insitu: if False a new scantable is returned.
1716 Otherwise, the switching is done in-situ
1717 The default is taken from .asaprc (False)
1718 Example:
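Minimal sketch (assumes 'scan' holds frequency-switched data):
fs = scan.freq_switch(insitu=False)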
1720 """
1721 if insitu is None: insitu = rcParams['insitu']
[876]1722 self._math._setinsitu(insitu)
[718]1723 varlist = vars()
[876]1724 s = scantable(self._math._freqswitch(self))
[1118]1725 s._add_history("freq_switch", varlist)
[876]1726 print_log()
1727 if insitu: self._assign(s)
1728 else: return s
[718]1729
[780]1730 def recalc_azel(self):
1731 """
1732 Recalculate the azimuth and elevation for each position.
1733 Parameters:
1734 none
1735 Example:
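Minimal sketch; recomputes Az/El from the stored times and positions:
scan.recalc_azel()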
1736 """
1737 varlist = vars()
[876]1738 self._recalcazel()
[780]1739 self._add_history("recalc_azel", varlist)
1740 print_log()
1741 return
1742
[513]1743 def __add__(self, other):
1744 varlist = vars()
1745 s = None
1746 if isinstance(other, scantable):
[1308]1747 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1748 elif isinstance(other, float):
[876]1749 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1750 else:
[718]1751 raise TypeError("Other input is not a scantable or float value")
[513]1752 s._add_history("operator +", varlist)
[718]1753 print_log()
[513]1754 return s
1755
1756 def __sub__(self, other):
1757 """
1758 implicit on all axes and on Tsys
1759 """
1760 varlist = vars()
1761 s = None
1762 if isinstance(other, scantable):
[1308]1763 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1764 elif isinstance(other, float):
[876]1765 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1766 else:
[718]1767 raise TypeError("Other input is not a scantable or float value")
[513]1768 s._add_history("operator -", varlist)
[718]1769 print_log()
[513]1770 return s
[710]1771
[513]1772 def __mul__(self, other):
1773 """
1774 implicit on all axes and on Tsys
1775 """
1776 varlist = vars()
1777 s = None
1778 if isinstance(other, scantable):
[1308]1779 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1780 elif isinstance(other, float):
[876]1781 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1782 else:
[718]1783 raise TypeError("Other input is not a scantable or float value")
[513]1784 s._add_history("operator *", varlist)
[718]1785 print_log()
[513]1786 return s
1787
[710]1788
[513]1789 def __div__(self, other):
1790 """
1791 implicit on all axes and on Tsys
1792 """
1793 varlist = vars()
1794 s = None
1795 if isinstance(other, scantable):
[1308]1796 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1797 elif isinstance(other, float):
1798 if other == 0.0:
[718]1799 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1800 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1801 else:
[718]1802 raise TypeError("Other input is not a scantable or float value")
[513]1803 s._add_history("operator /", varlist)
[718]1804 print_log()
[513]1805 return s
1806
[530]1807 def get_fit(self, row=0):
1808 """
1809 Print or return the stored fits for a row in the scantable
1810 Parameters:
1811 row: the row to which the fit has been applied.
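Example (minimal sketch; row 0 is illustrative):
fit = scan.get_fit(0)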
1812 """
1813 if row >= self.nrow():
1814 return
[976]1815 from asap.asapfit import asapfit
[530]1816 fit = asapfit(self._getfit(row))
[718]1817 if rcParams['verbose']:
[530]1818 print fit
1819 return
1820 else:
1821 return fit.as_dict()
1822
[484]1823 def _add_history(self, funcname, parameters):
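# Builds a history entry of the form '<date>##<funcname>##<param>=<value>##...'
# ('##' separates fields; scantable arguments and masks are summarised).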
1824 # create date
1825 sep = "##"
1826 from datetime import datetime
1827 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1828 hist = dstr+sep
1829 hist += funcname+sep#cdate+sep
1830 if parameters.has_key('self'): del parameters['self']
[1118]1831 for k, v in parameters.iteritems():
[484]1832 if type(v) is dict:
[1118]1833 for k2, v2 in v.iteritems():
[484]1834 hist += k2
1835 hist += "="
[1118]1836 if isinstance(v2, scantable):
[484]1837 hist += 'scantable'
1838 elif k2 == 'mask':
[1118]1839 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1840 hist += str(self._zip_mask(v2))
1841 else:
1842 hist += str(v2)
[484]1843 else:
[513]1844 hist += str(v2)
[484]1845 else:
1846 hist += k
1847 hist += "="
[1118]1848 if isinstance(v, scantable):
[484]1849 hist += 'scantable'
1850 elif k == 'mask':
[1118]1851 if isinstance(v, list) or isinstance(v, tuple):
[513]1852 hist += str(self._zip_mask(v))
1853 else:
1854 hist += str(v)
[484]1855 else:
1856 hist += str(v)
1857 hist += sep
1858 hist = hist[:-2] # remove trailing '##'
1859 self._addhistory(hist)
1860
[710]1861
[484]1862 def _zip_mask(self, mask):
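# Compress a boolean mask into [start, end) index segments,
# e.g. [0, 1, 1, 0, 1] -> [[1, 3], [4, 5]] (illustrative).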
1863 mask = list(mask)
1864 i = 0
1865 segments = []
1866 while mask[i:].count(1):
1867 i += mask[i:].index(1)
1868 if mask[i:].count(0):
1869 j = i + mask[i:].index(0)
1870 else:
[710]1871 j = len(mask)
[1118]1872 segments.append([i, j])
[710]1873 i = j
[484]1874 return segments
[714]1875
[626]1876 def _get_ordinate_label(self):
1877 fu = "("+self.get_fluxunit()+")"
1878 import re
1879 lbl = "Intensity"
[1118]1880 if re.match(".K.", fu):
[626]1881 lbl = "Brightness Temperature "+ fu
[1118]1882 elif re.match(".Jy.", fu):
[626]1883 lbl = "Flux density "+ fu
1884 return lbl
[710]1885
[876]1886 def _check_ifs(self):
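# Heuristic: the IFs are taken to be consistent if the mean number of
# channels equals that of the first IF.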
1887 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1888 nchans = filter(lambda t: t > 0, nchans)
[876]1889 return (sum(nchans)/len(nchans) == nchans[0])
[976]1890
1891 def _fill(self, names, unit, average):
1892 import os
1893 from asap._asap import stfiller
1894 first = True
1895 fullnames = []
1896 for name in names:
1897 name = os.path.expandvars(name)
1898 name = os.path.expanduser(name)
1899 if not os.path.exists(name):
1900 msg = "File '%s' does not exists" % (name)
1901 if rcParams['verbose']:
1902 asaplog.push(msg)
1903 print asaplog.pop().strip()
1904 return
1905 raise IOError(msg)
1906 fullnames.append(name)
1907 if average:
1908 asaplog.push('Auto averaging integrations')
[1079]1909 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1910 for name in fullnames:
[1073]1911 tbl = Scantable(stype)
1912 r = stfiller(tbl)
[976]1913 msg = "Importing %s..." % (name)
[1118]1914 asaplog.push(msg, False)
[976]1915 print_log()
[1118]1916 r._open(name, -1, -1)
[976]1917 r._read()
[1073]1918 #tbl = r._getdata()
[976]1919 if average:
[1118]1920 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1921 #tbl = tbl2
1922 if not first:
1923 tbl = self._math._merge([self, tbl])
1924 #tbl = tbl2
1925 Scantable.__init__(self, tbl)
1926 r._close()
[1118]1927 del r, tbl
[976]1928 first = False
1929 if unit is not None:
1930 self.set_fluxunit(unit)
[1446]1931 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]1932