source: branches/alma/python/scantable.py@ 1473

Last change on this file since 1473 was 1457, checked in by TakTsutsumi, 16 years ago

New Development: No

JIRA Issue: No

Ready to Release: Yes

Interface Changes: No

What Interface Changed:

Test Programs: sdplot(plottype='azel')

Put in Release Notes: No

Description: Fixed a bug in scantable.get_time().

Updated sd.plotter for azel plotting
with current get_time option.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 72.4 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1118]3from asap import print_log
4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
[710]13
[484]14 def __init__(self, filename, average=None, unit=None):
[102]15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
[181]18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
[102]25 scantable
[484]26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
[340]29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
[710]32 """
[976]33 if average is None:
[710]34 average = rcParams['scantable.autoaverage']
[1259]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[181]40 else:
[1118]41 if isinstance(filename, str):
[976]42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
[718]47 if rcParams['verbose']:
[976]48 asaplog.push(s)
49 print asaplog.pop().strip()
[718]50 return
[976]51 raise IOError(s)
[1115]52 if os.path.isdir(filename) \
[1118]53 and not os.path.exists(filename+'/table.f1'):
[976]54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
[1118]56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
[976]58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
[718]61 else:
[1118]62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
[976]64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
[226]69 else:
[1118]70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
[1259]74 self._add_history("scantable", varlist)
[714]75 print_log()
[102]76
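    # Illustrative usage sketch (not part of the original file); assumes an
    # RPFITS file 'orion.rpf' exists in the working directory:
    #   >>> from asap import scantable
    #   >>> scan = scantable('orion.rpf', average=True, unit='K')
    #   >>> print scan      # calls Scantable._summary via __str__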
[876]77 def save(self, name=None, format=None, overwrite=False):
[116]78 """
[1280]79 Store the scantable on disk. This can be an asap (aips++) Table,
80 SDFITS or MS2 format.
[116]81 Parameters:
[1093]82 name: the name of the output file. For format "ASCII"
83 this is the root file name (data in 'name'.txt
[497]84 and header in 'name'_header.txt)
[116]85 format: an optional file format. Default is ASAP.
[280]86 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]87 'SDFITS' (save as SDFITS file)
[200]88 'ASCII' (saves as ascii text file)
[226]89 'MS2' (saves as an aips++
90 MeasurementSet V2)
[411]91 overwrite: If the file should be overwritten if it exists.
[256]92 The default False is to return with warning
[411]93 without writing the output. USE WITH CARE.
[116]94 Example:
95 scan.save('myscan.asap')
[1118]96 scan.save('myscan.sdfits', 'SDFITS')
[116]97 """
[411]98 from os import path
[226]99 if format is None: format = rcParams['scantable.save']
[256]100 suffix = '.'+format.lower()
[1118]101 if name is None or name == "":
[256]102 name = 'scantable'+suffix
[718]103 msg = "No filename given. Using default name %s..." % name
104 asaplog.push(msg)
[411]105 name = path.expandvars(name)
[256]106 if path.isfile(name) or path.isdir(name):
107 if not overwrite:
[718]108 msg = "File %s exists." % name
109 if rcParams['verbose']:
110 print msg
111 return
112 else:
113 raise IOError(msg)
[451]114 format2 = format.upper()
115 if format2 == 'ASAP':
[116]116 self._save(name)
117 else:
[989]118 from asap._asap import stwriter as stw
[1118]119 writer = stw(format2)
120 writer.write(self, name)
[718]121 print_log()
[116]122 return
123
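    # Illustrative usage sketch (not part of the original file); writing a
    # (hypothetical) scantable out in two of the supported formats:
    #   >>> scan.save('myscan.asap')                            # default ASAP table
    #   >>> scan.save('myscan.sdfits', 'SDFITS', overwrite=True)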
[102]124 def copy(self):
125 """
126 Return a copy of this scantable.
[1348]127 Note:
128 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]129 Parameters:
[113]130 none
[102]131 Example:
132 copiedscan = scan.copy()
133 """
[876]134 sd = scantable(Scantable._copy(self))
[113]135 return sd
136
[1093]137 def drop_scan(self, scanid=None):
138 """
139 Return a new scantable where the specified scan number(s) has(have)
140 been dropped.
141 Parameters:
142 scanid: a (list of) scan number(s)
143 """
144 from asap import _is_sequence_or_number as _is_valid
145 from asap import _to_list
146 from asap import unique
147 if not _is_valid(scanid):
148 if rcParams['verbose']:
149 print "Please specify a scanno to drop from the scantable"
150 return
151 else:
152 raise RuntimeError("No scan given")
153 try:
154 scanid = _to_list(scanid)
155 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156 for sid in scanid: allscans.remove(sid)
[1118]157 if len(allscans) == 0:
158 raise ValueError("Can't remove all scans")
[1093]159 except ValueError:
160 if rcParams['verbose']:
161 print "Couldn't find any match."
162 return
163 else: raise
164 try:
165 bsel = self.get_selection()
166 sel = selector()
167 sel.set_scans(allscans)
168 self.set_selection(bsel+sel)
169 scopy = self._copy()
170 self.set_selection(bsel)
171 return scantable(scopy)
172 except RuntimeError:
[1118]173 if rcParams['verbose']:
174 print "Couldn't find any match."
175 else:
176 raise
[1093]177
178
[102]179 def get_scan(self, scanid=None):
180 """
181 Return a specific scan (by scanno) or collection of scans (by
182 source name) in a new scantable.
[1348]183 Note:
184 See scantable.drop_scan() for the inverse operation.
[102]185 Parameters:
[513]186 scanid: a (list of) scanno or a source name, unix-style
187 patterns are accepted for source name matching, e.g.
188 '*_R' gets all 'ref' scans
[102]189 Example:
[513]190 # get all scans containing the source '323p459'
191 newscan = scan.get_scan('323p459')
192 # get all 'off' scans
193 refscans = scan.get_scan('*_R')
194 # get a subset of scans by scanno (as listed in scan.summary())
[1118]195 newscan = scan.get_scan([0, 2, 7, 10])
[102]196 """
197 if scanid is None:
[718]198 if rcParams['verbose']:
[1118]199 print "Please specify a scan no or name to " \
200 "retrieve from the scantable"
[718]201 return
202 else:
203 raise RuntimeError("No scan given")
204
[102]205 try:
[946]206 bsel = self.get_selection()
207 sel = selector()
[102]208 if type(scanid) is str:
[946]209 sel.set_name(scanid)
210 self.set_selection(bsel+sel)
[876]211 scopy = self._copy()
[946]212 self.set_selection(bsel)
[876]213 return scantable(scopy)
[102]214 elif type(scanid) is int:
[946]215 sel.set_scans([scanid])
216 self.set_selection(bsel+sel)
[876]217 scopy = self._copy()
[946]218 self.set_selection(bsel)
[876]219 return scantable(scopy)
[381]220 elif type(scanid) is list:
[946]221 sel.set_scans(scanid)
222 self.set_selection(sel)
[876]223 scopy = self._copy()
[946]224 self.set_selection(bsel)
[876]225 return scantable(scopy)
[381]226 else:
[718]227 msg = "Illegal scanid type, use 'int' or 'list' if ints."
228 if rcParams['verbose']:
229 print msg
230 else:
231 raise TypeError(msg)
[102]232 except RuntimeError:
[718]233 if rcParams['verbose']: print "Couldn't find any match."
234 else: raise
[102]235
236 def __str__(self):
[1118]237 return Scantable._summary(self, True)
[102]238
[976]239 def summary(self, filename=None):
[102]240 """
241 Print a summary of the contents of this scantable.
242 Parameters:
243 filename: the name of a file to write the output to
244 Default - no file output
[381]245 verbose: print extra info such as the frequency table
246 The default (False) is taken from .asaprc
[102]247 """
[976]248 info = Scantable._summary(self, True)
249 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]250 if filename is not None:
[256]251 if filename == "":
252 filename = 'scantable_summary.txt'
[415]253 from os.path import expandvars, isdir
[411]254 filename = expandvars(filename)
[415]255 if not isdir(filename):
[413]256 data = open(filename, 'w')
257 data.write(info)
258 data.close()
259 else:
[718]260 msg = "Illegal file name '%s'." % (filename)
261 if rcParams['verbose']:
262 print msg
263 else:
264 raise IOError(msg)
265 if rcParams['verbose']:
[794]266 try:
267 from IPython.genutils import page as pager
268 except ImportError:
269 from pydoc import pager
270 pager(info)
[718]271 else:
272 return info
[710]273
[946]274
275 def get_selection(self):
276 """
[1005]277 Get the selection object currently set on this scantable.
278 Parameters:
279 none
280 Example:
281 sel = scan.get_selection()
282 sel.set_ifs(0) # select IF 0
283 scan.set_selection(sel) # apply modified selection
[946]284 """
285 return selector(self._getselection())
286
[1005]287 def set_selection(self, selection=selector()):
[946]288 """
[1005]289 Select a subset of the data. All following operations on this scantable
290 are only applied to this selection.
291 Parameters:
292 selection: a selector object (default unset the selection)
293 Examples:
294 sel = selector() # create a selection object
[1118]295 sel.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]296 scan.set_selection(sel) # set the selection
297 scan.summary() # will only print summary of scanno 0 and 3
298 scan.set_selection() # unset the selection
[946]299 """
300 self._setselection(selection)
301
[1446]302 def get_row(self, row=0, insitu=None):
303 """
304 Select a row in the scantable.
305 Return a scantable with single row.
306 Parameters:
307 row: row no of integration, default is 0.
308 insitu: if False a new scantable is returned.
309 Otherwise, the scaling is done in-situ
310 The default is taken from .asaprc (False)
311 """
312 if insitu is None: insitu = rcParams['insitu']
313 if not insitu:
314 workscan = self.copy()
315 else:
316 workscan = self
317 # Select a row
318 sel=selector()
319 sel.set_scans([workscan.getscan(row)])
320 sel.set_cycles([workscan.getcycle(row)])
321 sel.set_beams([workscan.getbeam(row)])
322 sel.set_ifs([workscan.getif(row)])
323 sel.set_polarisations([workscan.getpol(row)])
324 sel.set_name(workscan._getsourcename(row))
325 workscan.set_selection(sel)
326 if not workscan.nrow() == 1:
327 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
328 raise RuntimeError(msg)
329 del sel
330 if insitu:
331 self._assign(workscan)
332 else:
333 return workscan
334
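    # Illustrative usage sketch (not part of the original file); extracting
    # the third integration of a (hypothetical) scantable as a new table:
    #   >>> row2 = scan.get_row(row=2, insitu=False)
    #   >>> row2.nrow()     # -> 1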
[876]335 def stats(self, stat='stddev', mask=None):
[102]336 """
[135]337 Determine the specified statistic of the current beam/if/pol
[102]338 Takes a 'mask' as an optional parameter to specify which
339 channels should be excluded.
340 Parameters:
[135]341 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
342 'var', 'stddev', 'avdev', 'rms', 'median'
343 mask: an optional mask specifying where the statistic
[102]344 should be determined.
345 Example:
[113]346 scan.set_unit('channel')
[1118]347 msk = scan.create_mask([100, 200], [500, 600])
[135]348 scan.stats(stat='mean', mask=msk)
[102]349 """
350 if mask == None:
[876]351 mask = []
[1118]352 axes = ['Beam', 'IF', 'Pol', 'Time']
[876]353 if not self._check_ifs():
[1118]354 raise ValueError("Cannot apply mask as the IFs have different "
355 "number of channels. Please use setselection() "
356 "to select individual IFs")
[256]357
[876]358 statvals = self._math._stats(self, mask, stat)
359 out = ''
360 axes = []
361 for i in range(self.nrow()):
362 axis = []
363 axis.append(self.getscan(i))
364 axis.append(self.getbeam(i))
365 axis.append(self.getif(i))
366 axis.append(self.getpol(i))
367 axis.append(self.getcycle(i))
368 axes.append(axis)
369 tm = self._gettime(i)
370 src = self._getsourcename(i)
371 out += 'Scan[%d] (%s) ' % (axis[0], src)
372 out += 'Time[%s]:\n' % (tm)
373 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
374 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
375 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
376 out += '= %3.3f\n' % (statvals[i])
377 out += "--------------------------------------------------\n"
[256]378
[876]379 if rcParams['verbose']:
380 print "--------------------------------------------------"
[1118]381 print " ", stat
[876]382 print "--------------------------------------------------"
383 print out
[1295]384 #else:
385 #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
386 # 'axes' : axes,
387 # 'data': statvals}
388 return statvals
[102]389
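    # Illustrative usage sketch (not part of the original file); an rms over
    # line-free channels of a (hypothetical) scantable:
    #   >>> scan.set_unit('channel')
    #   >>> msk = scan.create_mask([100, 400], [600, 900])
    #   >>> rms = scan.stats(stat='rms', mask=msk)   # one value per row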
[1118]390 def stddev(self, mask=None):
[135]391 """
392 Determine the standard deviation of the current beam/if/pol
393 Takes a 'mask' as an optional parameter to specify which
394 channels should be excluded.
395 Parameters:
396 mask: an optional mask specifying where the standard
397 deviation should be determined.
398
399 Example:
400 scan.set_unit('channel')
[1118]401 msk = scan.create_mask([100, 200], [500, 600])
[135]402 scan.stddev(mask=msk)
403 """
[1118]404 return self.stats(stat='stddev', mask=mask);
[135]405
[1003]406
[1259]407 def get_column_names(self):
[1003]408 """
409 Return a list of column names, which can be used for selection.
410 """
[1259]411 return list(Scantable.get_column_names(self))
[1003]412
[876]413 def get_tsys(self):
[113]414 """
415 Return the System temperatures.
416 Returns:
[876]417 a list of Tsys values for the current selection
[113]418 """
[256]419
[876]420 return self._row_callback(self._gettsys, "Tsys")
[256]421
[876]422 def _row_callback(self, callback, label):
423 axes = []
[1118]424 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
[876]425 out = ""
[1118]426 outvec = []
[876]427 for i in range(self.nrow()):
428 axis = []
429 axis.append(self.getscan(i))
430 axis.append(self.getbeam(i))
431 axis.append(self.getif(i))
432 axis.append(self.getpol(i))
433 axis.append(self.getcycle(i))
434 axes.append(axis)
435 tm = self._gettime(i)
436 src = self._getsourcename(i)
437 out += 'Scan[%d] (%s) ' % (axis[0], src)
438 out += 'Time[%s]:\n' % (tm)
439 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
440 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
441 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
442 outvec.append(callback(i))
443 out += '= %3.3f\n' % (outvec[i])
444 out += "--------------------------------------------------\n"
445 if rcParams['verbose']:
446 print "--------------------------------------------------"
447 print " %s" % (label)
448 print "--------------------------------------------------"
449 print out
[1175]450 # disabled because the vector seems more useful
451 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
452 return outvec
[256]453
[1070]454 def _get_column(self, callback, row=-1):
455 """
456 """
457 if row == -1:
458 return [callback(i) for i in range(self.nrow())]
459 else:
460 if 0 <= row < self.nrow():
461 return callback(row)
[256]462
[1070]463
[1348]464 def get_time(self, row=-1, asdatetime=False):
[113]465 """
466 Get a list of time stamps for the observations.
[1348]467 Return a string (or a datetime object if asdatetime=True) for each integration time stamp in the scantable.
[113]468 Parameters:
[1348]469 row: row no of integration. Default -1 return all rows
470 asdatetime: return values as datetime objects rather than strings
[113]471 Example:
472 none
473 """
[1175]474 from time import strptime
475 from datetime import datetime
[1457]476 times = self._get_column(self._gettime, row)
[1348]477 if not asdatetime:
[1457]478 return times
[1175]479 format = "%Y/%m/%d/%H:%M:%S"
480 if isinstance(times, list):
481 return [datetime(*strptime(i, format)[:6]) for i in times]
482 else:
483 return datetime(*strptime(times, format)[:6])
[102]484
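    # Illustrative usage sketch (not part of the original file); the two
    # return types of get_time() for a (hypothetical) scantable:
    #   >>> scan.get_time(0)                   # 'YYYY/MM/DD/HH:MM:SS' string
    #   >>> scan.get_time(0, asdatetime=True)  # datetime.datetime instance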
[1348]485
486 def get_inttime(self, row=-1):
487 """
488 Get a list of integration times for the observations.
489 Return a time in seconds for each integration in the scantable.
490 Parameters:
491 row: row no of integration. Default -1 return all rows.
492 Example:
493 none
494 """
495 return self._get_column(self._getinttime, row)
496
497
[714]498 def get_sourcename(self, row=-1):
499 """
[794]500 Get a list of source names for the observations.
[714]501 Return a string for each integration in the scantable.
502 Parameters:
[1348]503 row: row no of integration. Default -1 return all rows.
[714]504 Example:
505 none
506 """
[1070]507 return self._get_column(self._getsourcename, row)
[714]508
[794]509 def get_elevation(self, row=-1):
510 """
511 Get a list of elevations for the observations.
512 Return a float for each integration in the scantable.
513 Parameters:
[1348]514 row: row no of integration. Default -1 return all rows.
[794]515 Example:
516 none
517 """
[1070]518 return self._get_column(self._getelevation, row)
[794]519
520 def get_azimuth(self, row=-1):
521 """
522 Get a list of azimuths for the observations.
523 Return a float for each integration in the scantable.
524 Parameters:
[1348]525 row: row no of integration. Default -1 return all rows.
[794]526 Example:
527 none
528 """
[1070]529 return self._get_column(self._getazimuth, row)
[794]530
531 def get_parangle(self, row=-1):
532 """
533 Get a list of parallactic angles for the observations.
534 Return a float for each integration in the scantable.
535 Parameters:
[1348]536 row: row no of integration. Default -1 return all rows.
[794]537 Example:
538 none
539 """
[1070]540 return self._get_column(self._getparangle, row)
[794]541
[1070]542 def get_direction(self, row=-1):
543 """
544 Get a list of Positions on the sky (direction) for the observations.
545 Return a float for each integration in the scantable.
546 Parameters:
547 row: row no of integration. Default -1 return all rows
548 Example:
549 none
550 """
551 return self._get_column(self._getdirection, row)
552
[1389]553 def get_directionval(self, row=-1):
554 """
555 Get a list of Positions on the sky (direction) for the observations.
556 Return a float for each integration in the scantable.
557 Parameters:
558 row: row no of integration. Default -1 return all rows
559 Example:
560 none
561 """
562 return self._get_column(self._getdirectionvec, row)
563
[102]564 def set_unit(self, unit='channel'):
565 """
566 Set the unit for all following operations on this scantable
567 Parameters:
568 unit: optional unit, default is 'channel'
[1118]569 one of '*Hz', 'km/s', 'channel', ''
[102]570 """
[484]571 varlist = vars()
[1118]572 if unit in ['', 'pixel', 'channel']:
[113]573 unit = ''
574 inf = list(self._getcoordinfo())
575 inf[0] = unit
576 self._setcoordinfo(inf)
[1118]577 self._add_history("set_unit", varlist)
[113]578
[484]579 def set_instrument(self, instr):
[358]580 """
[1348]581 Set the instrument for subsequent processing.
[358]582 Parameters:
[710]583 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]584 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]585 """
586 self._setInstrument(instr)
[1118]587 self._add_history("set_instument", vars())
[718]588 print_log()
[358]589
[1190]590 def set_feedtype(self, feedtype):
591 """
592 Overwrite the feed type, which might not be set correctly.
593 Parameters:
594 feedtype: 'linear' or 'circular'
595 """
596 self._setfeedtype(feedtype)
597 self._add_history("set_feedtype", vars())
598 print_log()
599
[276]600 def set_doppler(self, doppler='RADIO'):
601 """
602 Set the doppler for all following operations on this scantable.
603 Parameters:
604 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
605 """
[484]606 varlist = vars()
[276]607 inf = list(self._getcoordinfo())
608 inf[2] = doppler
609 self._setcoordinfo(inf)
[1118]610 self._add_history("set_doppler", vars())
[718]611 print_log()
[710]612
[226]613 def set_freqframe(self, frame=None):
[113]614 """
615 Set the frame type of the Spectral Axis.
616 Parameters:
[591]617 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]618 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
619 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]620 Examples:
621 scan.set_freqframe('BARY')
622 """
[484]623 if frame is None: frame = rcParams['scantable.freqframe']
624 varlist = vars()
[1118]625 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
626 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]627
[989]628 if frame in valid:
[113]629 inf = list(self._getcoordinfo())
630 inf[1] = frame
631 self._setcoordinfo(inf)
[1118]632 self._add_history("set_freqframe", varlist)
[102]633 else:
[1118]634 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]635 if rcParams['verbose']:
636 print msg
637 else:
638 raise TypeError(msg)
639 print_log()
[710]640
[989]641 def set_dirframe(self, frame=""):
642 """
643 Set the frame type of the Direction on the sky.
644 Parameters:
645 frame: an optional frame type, default ''. Valid frames are:
646 'J2000', 'B1950', 'GALACTIC'
647 Examples:
648 scan.set_dirframe('GALACTIC')
649 """
650 varlist = vars()
651 try:
652 Scantable.set_dirframe(self, frame)
[1118]653 except RuntimeError, msg:
[989]654 if rcParams['verbose']:
655 print msg
656 else:
657 raise
[1118]658 self._add_history("set_dirframe", varlist)
[989]659
[113]660 def get_unit(self):
661 """
662 Get the default unit set in this scantable
663 Returns:
664 A unit string
665 """
666 inf = self._getcoordinfo()
667 unit = inf[0]
668 if unit == '': unit = 'channel'
669 return unit
[102]670
[158]671 def get_abcissa(self, rowno=0):
[102]672 """
[158]673 Get the abcissa in the current coordinate setup for the currently
[113]674 selected Beam/IF/Pol
675 Parameters:
[226]676 rowno: an optional row number in the scantable. Default is the
677 first row, i.e. rowno=0
[113]678 Returns:
[1348]679 The abcissa values and the format string (as a tuple)
[113]680 """
[256]681 abc = self._getabcissa(rowno)
[710]682 lbl = self._getabcissalabel(rowno)
[718]683 print_log()
[158]684 return abc, lbl
[113]685
[1401]686 def flag(self, mask=None, unflag=False):
[1001]687 """
688 Flag the selected data using an optional channel mask.
689 Parameters:
690 mask: an optional channel mask, created with create_mask. Default
691 (no mask) is all channels.
[1401]692 unflag: if True, unflag the data
[1001]693 """
694 varlist = vars()
[1118]695 if mask is None:
696 mask = []
[1001]697 try:
[1401]698 self._flag(mask, unflag)
[1118]699 except RuntimeError, msg:
[1001]700 if rcParams['verbose']:
701 print msg
702 return
703 else: raise
704 self._add_history("flag", varlist)
705
[1203]706 def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
[1192]707 """
708 Flag the data in 'lag' space by providing a frequency to remove.
709 Flagged data in the scantable gets set to 0.0 before the fft.
710 No taper is applied.
711 Parameters:
[1348]712 frequency: the frequency (really a period within the bandwidth)
713 to remove
714 width: the width of the frequency to remove, used to remove a
715 range of frequencies around the centre.
[1203]716 unit: the frequency unit (default "GHz")
717 Notes:
[1348]718 It is recommended to flag edges of the band or strong
719 signals beforehand.
[1192]720 """
721 if insitu is None: insitu = rcParams['insitu']
722 self._math._setinsitu(insitu)
723 varlist = vars()
[1370]724 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
[1192]725 if not base.has_key(unit):
726 raise ValueError("%s is not a valid unit." % unit)
727 try:
[1200]728 s = scantable(self._math._lag_flag(self, frequency*base[unit],
729 width*base[unit]))
[1192]730 except RuntimeError, msg:
731 if rcParams['verbose']:
732 print msg
733 return
734 else: raise
735 s._add_history("lag_flag", varlist)
736 print_log()
737 if insitu:
738 self._assign(s)
739 else:
740 return s
[1001]741
[1192]742
[113]743 def create_mask(self, *args, **kwargs):
744 """
[1118]745 Compute and return a mask based on [min, max] windows.
[189]746 The specified windows are to be INCLUDED, when the mask is
[113]747 applied.
[102]748 Parameters:
[1118]749 [min, max], [min2, max2], ...
[1024]750 Pairs of start/end points (inclusive) specifying the regions
[102]751 to be masked
[189]752 invert: optional argument. If specified as True,
753 return an inverted mask, i.e. the regions
754 specified are EXCLUDED
[513]755 row: create the mask using the specified row for
756 unit conversions, default is row=0
757 only necessary if frequency varies over rows.
[102]758 Example:
[113]759 scan.set_unit('channel')
760 a)
[1118]761 msk = scan.create_mask([400, 500], [800, 900])
[189]762 # masks everything outside 400 and 500
[113]763 # and 800 and 900 in the unit 'channel'
764
765 b)
[1118]766 msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]767 # masks the regions between 400 and 500
[113]768 # and 800 and 900 in the unit 'channel'
[1024]769 c)
770 mask only channel 400
[1118]771 msk = scan.create_mask([400, 400])
[102]772 """
[513]773 row = 0
774 if kwargs.has_key("row"):
775 row = kwargs.get("row")
776 data = self._getabcissa(row)
[113]777 u = self._getcoordinfo()[0]
[718]778 if rcParams['verbose']:
[113]779 if u == "": u = "channel"
[718]780 msg = "The current mask window unit is %s" % u
[1118]781 i = self._check_ifs()
782 if not i:
[876]783 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]784 asaplog.push(msg)
[102]785 n = self.nchan()
[1295]786 msk = _n_bools(n, False)
[710]787 # test if args is a 'list' or a 'normal *args - UGLY!!!
788
[1118]789 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
790 and args or args[0]
[710]791 for window in ws:
[102]792 if (len(window) != 2 or window[0] > window[1] ):
[1118]793 raise TypeError("A window needs to be defined as [min, max]")
[102]794 for i in range(n):
[1024]795 if data[i] >= window[0] and data[i] <= window[1]:
[1295]796 msk[i] = True
[113]797 if kwargs.has_key('invert'):
798 if kwargs.get('invert'):
[1295]799 msk = mask_not(msk)
[718]800 print_log()
[102]801 return msk
[710]802
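    # Illustrative usage sketch (not part of the original file); combining
    # masks with the helpers imported at the top of this module:
    #   >>> m1 = scan.create_mask([100, 200])
    #   >>> m2 = scan.create_mask([500, 600])
    #   >>> both = mask_or(m1, m2)     # True inside either window
    #   >>> outside = mask_not(both)   # invert the combined mask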
[1446]803 def get_masklist(self, mask=None, row=0):
[256]804 """
[1446]805 Compute and return a list of mask windows, [min, max].
806 Parameters:
807 mask: channel mask, created with create_mask.
808 row: calculate the masklist using the specified row
809 for unit conversions, default is row=0
810 only necessary if frequency varies over rows.
811 Returns:
812 [min, max], [min2, max2], ...
813 Pairs of start/end points (inclusive) specifying
814 the masked regions
815 """
816 if not (isinstance(mask,list) or isinstance(mask, tuple)):
817 raise TypeError("The mask should be list or tuple.")
818 if len(mask) < 2:
819 raise TypeError("The mask elements should be > 1")
820 if self.nchan() != len(mask):
821 msg = "Number of channels in scantable != number of mask elements"
822 raise TypeError(msg)
823 data = self._getabcissa(row)
824 u = self._getcoordinfo()[0]
825 if rcParams['verbose']:
826 if u == "": u = "channel"
827 msg = "The current mask window unit is %s" % u
828 i = self._check_ifs()
829 if not i:
830 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
831 asaplog.push(msg)
832 masklist=[]
833 ist, ien = None, None
834 ist, ien=self.get_mask_indices(mask)
835 if ist is not None and ien is not None:
836 for i in xrange(len(ist)):
837 range=[data[ist[i]],data[ien[i]]]
838 range.sort()
839 masklist.append([range[0],range[1]])
840 return masklist
841
842 def get_mask_indices(self, mask=None):
843 """
844 Compute and Return lists of mask start indices and mask end indices.
845 Parameters:
846 mask: channel mask, created with create_mask.
847 Returns:
848 List of mask start indices and that of mask end indices,
849 i.e., [istart1,istart2,....], [iend1,iend2,....].
850 """
851 if not (isinstance(mask,list) or isinstance(mask, tuple)):
852 raise TypeError("The mask should be list or tuple.")
853 if len(mask) < 2:
854 raise TypeError("The mask elements should be > 1")
855 istart=[]
856 iend=[]
857 if mask[0]: istart.append(0)
858 for i in range(len(mask)-1):
859 if not mask[i] and mask[i+1]:
860 istart.append(i+1)
861 elif mask[i] and not mask[i+1]:
862 iend.append(i)
863 if mask[len(mask)-1]: iend.append(len(mask)-1)
864 if len(istart) != len(iend):
865 raise RuntimeError("Numbers of mask start != mask end.")
866 for i in range(len(istart)):
867 if istart[i] > iend[i]:
868 raise RuntimeError("Mask start index > mask end index")
869 break
870 return istart,iend
871
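    # Illustrative usage sketch (not part of the original file); turning a
    # channel mask back into windows for a (hypothetical) scantable:
    #   >>> msk = scan.create_mask([100, 200], [500, 600])
    #   >>> scan.get_mask_indices(msk)   # -> ([100, 500], [200, 600])
    #   >>> scan.get_masklist(msk)       # [min, max] pairs in the current unit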
872# def get_restfreqs(self):
873# """
874# Get the restfrequency(s) stored in this scantable.
875# The return value(s) are always of unit 'Hz'
876# Parameters:
877# none
878# Returns:
879# a list of doubles
880# """
881# return list(self._getrestfreqs())
882
883 def get_restfreqs(self, ids=None):
884 """
[256]885 Get the restfrequency(s) stored in this scantable.
886 The return value(s) are always of unit 'Hz'
887 Parameters:
[1446]888 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
889 be retrieved
[256]890 Returns:
[1446]891 dictionary containing ids and a list of doubles for each id
[256]892 """
[1446]893 if ids is None:
894 rfreqs={}
895 idlist = self.getmolnos()
896 for i in idlist:
897 rfreqs[i]=list(self._getrestfreqs(i))
898 return rfreqs
899 else:
900 if type(ids)==list or type(ids)==tuple:
901 rfreqs={}
902 for i in ids:
903 rfreqs[i]=list(self._getrestfreqs(i))
904 return rfreqs
905 else:
906 return list(self._getrestfreqs(ids))
907 #return list(self._getrestfreqs(ids))
[102]908
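    # Illustrative usage sketch (not part of the original file); the return
    # type of get_restfreqs() depends on the 'ids' argument:
    #   >>> scan.get_restfreqs()         # dict of {MOLECULE_ID: [values in Hz]}
    #   >>> scan.get_restfreqs(ids=0)    # list of doubles for that id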
[931]909 def set_restfreqs(self, freqs=None, unit='Hz'):
910 """
[1446]911 ********NEED TO BE UPDATED begin************
[931]912 Set or replace the restfrequency specified and
913 If the 'freqs' argument holds a scalar,
914 then that rest frequency will be applied to all the selected
915 data. If the 'freqs' argument holds
916 a vector, then it MUST be of equal or smaller length than
917 the number of IFs (and the available restfrequencies will be
918 replaced by this vector). In this case, *all* data have
919 the restfrequency set per IF according
920 to the corresponding value you give in the 'freqs' vector.
[1118]921 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]922 IF 1 gets restfreq 2e9.
[1446]923 ********NEED TO BE UPDATED end************
[1153]924 You can also specify the frequencies via a linecatalog/
925
[931]926 Parameters:
927 freqs: list of rest frequency values or string identifiers
928 unit: unit for rest frequency (default 'Hz')
[402]929
[931]930 Example:
[1446]931 # set the given restfrequency for the all currently selected IFs
[931]932 scan.set_restfreqs(freqs=1.4e9)
[1446]933 # set multiple restfrequencies to all the selected data
934 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
935 # If the number of IFs in the data is >= 2 the IF0 gets the first
936 # value IF1 the second... NOTE that freqs needs to be
937 # specified in list of list (e.g. [[],[],...] ).
938 scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
[931]939 #set the given restfrequency for the whole table (by name)
940 scan.set_restfreqs(freqs="OH1667")
[391]941
[931]942 Note:
943 To do more sophisticated rest frequency setting, e.g. on a
944 source and IF basis, use scantable.set_selection() before using
945 this function.
946 # provided your scantable is called scan
947 selection = selector()
948 selection.set_name("ORION*")
949 selection.set_ifs([1])
950 scan.set_selection(selection)
951 scan.set_restfreqs(freqs=86.6e9)
952
953 """
954 varlist = vars()
[1157]955 from asap import linecatalog
956 # simple value
[1118]957 if isinstance(freqs, int) or isinstance(freqs, float):
[1446]958 # TT mod
959 #self._setrestfreqs(freqs, "",unit)
960 self._setrestfreqs([freqs], [""],unit)
[1157]961 # list of values
[1118]962 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]963 # list values are scalars
[1118]964 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1446]965 self._setrestfreqs(freqs, [""],unit)
[1157]966 # list values are tuples, (value, name)
967 elif isinstance(freqs[-1], dict):
[1446]968 #sel = selector()
969 #savesel = self._getselection()
970 #iflist = self.getifnos()
971 #for i in xrange(len(freqs)):
972 # sel.set_ifs(iflist[i])
973 # self._setselection(sel)
974 # self._setrestfreqs(freqs[i], "",unit)
975 #self._setselection(savesel)
976 self._setrestfreqs(freqs["value"],
977 freqs["name"], "MHz")
978 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]979 sel = selector()
980 savesel = self._getselection()
[1322]981 iflist = self.getifnos()
[1446]982 if len(freqs)>len(iflist):
983 raise ValueError("number of elements in list of list exeeds the current IF selections")
[1157]984 for i in xrange(len(freqs)):
[1322]985 sel.set_ifs(iflist[i])
[1259]986 self._setselection(sel)
[1157]987 self._setrestfreqs(freqs[i]["value"],
988 freqs[i]["name"], "MHz")
989 self._setselection(savesel)
990 # freqs are to be taken from a linecatalog
[1153]991 elif isinstance(freqs, linecatalog):
992 sel = selector()
993 savesel = self._getselection()
994 for i in xrange(freqs.nrow()):
[1322]995 sel.set_ifs(iflist[i])
[1153]996 self._setselection(sel)
997 self._setrestfreqs(freqs.get_frequency(i),
998 freqs.get_name(i), "MHz")
999 # ensure that we are not iterating past nIF
1000 if i == self.nif()-1: break
1001 self._setselection(savesel)
[931]1002 else:
1003 return
1004 self._add_history("set_restfreqs", varlist)
1005
[1360]1006 def shift_refpix(self, delta):
1007 """
1008 Shift the reference pixel of the Spectral Coordinate by an
1009 integer amount.
1010 Parameters:
1011 delta: the amount to shift by
1012 Note:
1013 Be careful using this with broadband data.
1014 """
1015 Scantable.shift(self, delta)
[931]1016
[1259]1017 def history(self, filename=None):
1018 """
1019 Print the history. Optionally to a file.
[1348]1020 Parameters:
1021 filename: The name of the file to save the history to.
[1259]1022 """
[484]1023 hist = list(self._gethistory())
[794]1024 out = "-"*80
[484]1025 for h in hist:
[489]1026 if h.startswith("---"):
[794]1027 out += "\n"+h
[489]1028 else:
1029 items = h.split("##")
1030 date = items[0]
1031 func = items[1]
1032 items = items[2:]
[794]1033 out += "\n"+date+"\n"
1034 out += "Function: %s\n Parameters:" % (func)
[489]1035 for i in items:
1036 s = i.split("=")
[1118]1037 out += "\n %s = %s" % (s[0], s[1])
[794]1038 out += "\n"+"-"*80
[1259]1039 if filename is not None:
1040 if filename == "":
1041 filename = 'scantable_history.txt'
1042 import os
1043 filename = os.path.expandvars(os.path.expanduser(filename))
1044 if not os.path.isdir(filename):
1045 data = open(filename, 'w')
1046 data.write(out)
1047 data.close()
1048 else:
1049 msg = "Illegal file name '%s'." % (filename)
1050 if rcParams['verbose']:
1051 print msg
1052 else:
1053 raise IOError(msg)
1054 if rcParams['verbose']:
1055 try:
1056 from IPython.genutils import page as pager
1057 except ImportError:
1058 from pydoc import pager
1059 pager(out)
1060 else:
1061 return out
[484]1062 return
[513]1063 #
1064 # Maths business
1065 #
1066
[931]1067 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]1068 """
[1070]1069 Return the (time) weighted average of a scan.
[513]1070 Note:
[1070]1071 in channels only - align if necessary
[513]1072 Parameters:
1073 mask: an optional mask (only used for 'var' and 'tsys'
1074 weighting)
[558]1075 scanav: True averages each scan separately
1076 False (default) averages all scans together,
[1099]1077 weight: Weighting scheme.
1078 'none' (mean no weight)
1079 'var' (1/var(spec) weighted)
1080 'tsys' (1/Tsys**2 weighted)
1081 'tint' (integration time weighted)
1082 'tintsys' (Tint/Tsys**2)
1083 'median' ( median averaging)
[535]1084 The default is 'tint'
[931]1085 align: align the spectra in velocity before averaging. It takes
1086 the time of the first spectrum as reference time.
[513]1087 Example:
1088 # time average the scantable without using a mask
[710]1089 newscan = scan.average_time()
[513]1090 """
1091 varlist = vars()
[976]1092 if weight is None: weight = 'TINT'
[513]1093 if mask is None: mask = ()
[1099]1094 if scanav: scanav = "SCAN"
1095 else: scanav = "NONE"
[1118]1096 scan = (self, )
[989]1097 try:
[1118]1098 if align:
1099 scan = (self.freq_align(insitu=False), )
1100 s = None
1101 if weight.upper() == 'MEDIAN':
1102 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1103 scanav))
1104 else:
1105 s = scantable(self._math._average(scan, mask, weight.upper(),
1106 scanav))
1107 except RuntimeError, msg:
[989]1108 if rcParams['verbose']:
1109 print msg
1110 return
1111 else: raise
[1099]1112 s._add_history("average_time", varlist)
[718]1113 print_log()
[513]1114 return s
[710]1115
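    # Illustrative usage sketch (not part of the original file); a per-scan,
    # Tint/Tsys**2 weighted time average of a (hypothetical) scantable:
    #   >>> av = scan.average_time(scanav=True, weight='tintsys', align=True)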
[876]1116 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]1117 """
1118 Return a scan where all spectra are converted to either
1119 Jansky or Kelvin depending upon the flux units of the scan table.
1120 By default the function tries to look the values up internally.
1121 If it can't find them (or if you want to over-ride), you must
1122 specify EITHER jyperk OR eta (and D which it will try to look up
1123 also if you don't set it). jyperk takes precedence if you set both.
1124 Parameters:
1125 jyperk: the Jy / K conversion factor
1126 eta: the aperture efficiency
1127 d: the geometric diameter (metres)
1128 insitu: if False a new scantable is returned.
1129 Otherwise, the scaling is done in-situ
1130 The default is taken from .asaprc (False)
1131 """
1132 if insitu is None: insitu = rcParams['insitu']
[876]1133 self._math._setinsitu(insitu)
[513]1134 varlist = vars()
1135 if jyperk is None: jyperk = -1.0
1136 if d is None: d = -1.0
1137 if eta is None: eta = -1.0
[876]1138 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1139 s._add_history("convert_flux", varlist)
1140 print_log()
1141 if insitu: self._assign(s)
1142 else: return s
[513]1143
[876]1144 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1145 """
1146 Return a scan after applying a gain-elevation correction.
1147 The correction can be made via either a polynomial or a
1148 table-based interpolation (and extrapolation if necessary).
1149 You specify polynomial coefficients, an ascii table or neither.
1150 If you specify neither, then a polynomial correction will be made
1151 with built in coefficients known for certain telescopes (an error
1152 will occur if the instrument is not known).
1153 The data and Tsys are *divided* by the scaling factors.
1154 Parameters:
1155 poly: Polynomial coefficients (default None) to compute a
1156 gain-elevation correction as a function of
1157 elevation (in degrees).
1158 filename: The name of an ascii file holding correction factors.
1159 The first row of the ascii file must give the column
1160 names and these MUST include columns
1161 "ELEVATION" (degrees) and "FACTOR" (multiply data
1162 by this) somewhere.
1163 The second row must give the data type of the
1164 column. Use 'R' for Real and 'I' for Integer.
1165 An example file would be
1166 (actual factors are arbitrary) :
1167
1168 TIME ELEVATION FACTOR
1169 R R R
1170 0.1 0 0.8
1171 0.2 20 0.85
1172 0.3 40 0.9
1173 0.4 60 0.85
1174 0.5 80 0.8
1175 0.6 90 0.75
1176 method: Interpolation method when correcting from a table.
1177 Values are "nearest", "linear" (default), "cubic"
1178 and "spline"
1179 insitu: if False a new scantable is returned.
1180 Otherwise, the scaling is done in-situ
1181 The default is taken from .asaprc (False)
1182 """
1183
1184 if insitu is None: insitu = rcParams['insitu']
[876]1185 self._math._setinsitu(insitu)
[513]1186 varlist = vars()
1187 if poly is None:
[1118]1188 poly = ()
[513]1189 from os.path import expandvars
1190 filename = expandvars(filename)
[876]1191 s = scantable(self._math._gainel(self, poly, filename, method))
1192 s._add_history("gain_el", varlist)
1193 print_log()
1194 if insitu: self._assign(s)
1195 else: return s
[710]1196
[931]1197 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1198 """
1199 Return a scan where all rows have been aligned in frequency/velocity.
1200 The alignment frequency frame (e.g. LSRK) is that set by function
1201 set_freqframe.
1202 Parameters:
1203 reftime: reference time to align at. By default, the time of
1204 the first row of data is used.
1205 method: Interpolation method for regridding the spectra.
1206 Choose from "nearest", "linear", "cubic" (default)
1207 and "spline"
1208 insitu: if False a new scantable is returned.
1209 Otherwise, the scaling is done in-situ
1210 The default is taken from .asaprc (False)
1211 """
[931]1212 if insitu is None: insitu = rcParams["insitu"]
[876]1213 self._math._setinsitu(insitu)
[513]1214 varlist = vars()
[931]1215 if reftime is None: reftime = ""
1216 s = scantable(self._math._freq_align(self, reftime, method))
[876]1217 s._add_history("freq_align", varlist)
1218 print_log()
1219 if insitu: self._assign(s)
1220 else: return s
[513]1221
[876]1222 def opacity(self, tau, insitu=None):
[513]1223 """
1224 Apply an opacity correction. The data
1225 and Tsys are multiplied by the correction factor.
1226 Parameters:
1227 tau: Opacity from which the correction factor is
1228 exp(tau*ZD)
1229 where ZD is the zenith-distance
1230 insitu: if False a new scantable is returned.
1231 Otherwise, the scaling is done in-situ
1232 The default is taken from .asaprc (False)
1233 """
1234 if insitu is None: insitu = rcParams['insitu']
[876]1235 self._math._setinsitu(insitu)
[513]1236 varlist = vars()
[876]1237 s = scantable(self._math._opacity(self, tau))
1238 s._add_history("opacity", varlist)
1239 print_log()
1240 if insitu: self._assign(s)
1241 else: return s
[513]1242
1243 def bin(self, width=5, insitu=None):
1244 """
1245 Return a scan where all spectra have been binned up.
[1348]1246 Parameters:
[513]1247 width: The bin width (default=5) in pixels
1248 insitu: if False a new scantable is returned.
1249 Otherwise, the scaling is done in-situ
1250 The default is taken from .asaprc (False)
1251 """
1252 if insitu is None: insitu = rcParams['insitu']
[876]1253 self._math._setinsitu(insitu)
[513]1254 varlist = vars()
[876]1255 s = scantable(self._math._bin(self, width))
[1118]1256 s._add_history("bin", varlist)
[876]1257 print_log()
1258 if insitu: self._assign(s)
1259 else: return s
[513]1260
[710]1261
[513]1262 def resample(self, width=5, method='cubic', insitu=None):
1263 """
[1348]1264 Return a scan where all spectra have been resampled (regridded by interpolation).
1265
1266 Parameters:
[513]1267 width: The bin width (default=5) in pixels
1268 method: Interpolation method when correcting from a table.
1269 Values are "nearest", "linear", "cubic" (default)
1270 and "spline"
1271 insitu: if False a new scantable is returned.
1272 Otherwise, the scaling is done in-situ
1273 The default is taken from .asaprc (False)
1274 """
1275 if insitu is None: insitu = rcParams['insitu']
[876]1276 self._math._setinsitu(insitu)
[513]1277 varlist = vars()
[876]1278 s = scantable(self._math._resample(self, method, width))
[1118]1279 s._add_history("resample", varlist)
[876]1280 print_log()
1281 if insitu: self._assign(s)
1282 else: return s
[513]1283
1284
[946]1285 def average_pol(self, mask=None, weight='none'):
1286 """
1287 Average the Polarisations together.
1288 Parameters:
1289 mask: An optional mask defining the region, where the
1290 averaging will be applied. The output will have all
1291 specified points masked.
1292 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1293 weighted), or 'tsys' (1/Tsys**2 weighted)
1294 """
1295 varlist = vars()
1296 if mask is None:
1297 mask = ()
[1010]1298 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1299 s._add_history("average_pol", varlist)
[946]1300 print_log()
[992]1301 return s
[513]1302
[1145]1303 def average_beam(self, mask=None, weight='none'):
1304 """
1305 Average the Beams together.
1306 Parameters:
1307 mask: An optional mask defining the region, where the
1308 averaging will be applied. The output will have all
1309 specified points masked.
1310 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1311 weighted), or 'tsys' (1/Tsys**2 weighted)
1312 """
1313 varlist = vars()
1314 if mask is None:
1315 mask = ()
1316 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1317 s._add_history("average_beam", varlist)
1318 print_log()
1319 return s
1320
[992]1321 def convert_pol(self, poltype=None):
1322 """
1323 Convert the data to a different polarisation type.
1324 Parameters:
1325 poltype: The new polarisation type. Valid types are:
1326 "linear", "stokes" and "circular"
1327 """
1328 varlist = vars()
1329 try:
1330 s = scantable(self._math._convertpol(self, poltype))
[1118]1331 except RuntimeError, msg:
[992]1332 if rcParams['verbose']:
[1118]1333 print msg
1334 return
[992]1335 else:
1336 raise
[1118]1337 s._add_history("convert_pol", varlist)
[992]1338 print_log()
1339 return s
1340
[876]1341 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]1342 """
1343 Smooth the spectrum by the specified kernel (conserving flux).
1344 Parameters:
1345 kernel: The type of smoothing kernel. Select from
[1373]1346 'hanning' (default), 'gaussian', 'boxcar' and
1347 'rmedian'
[513]1348 width: The width of the kernel in pixels. For hanning this is
1349 ignored, otherwise it defaults to 5 pixels.
1350 For 'gaussian' it is the Full Width Half
1351 Maximum. For 'boxcar' it is the full width.
[1373]1352 For 'rmedian' it is the half width.
[513]1353 insitu: if False a new scantable is returned.
1354 Otherwise, the scaling is done in-situ
1355 The default is taken from .asaprc (False)
1356 Example:
1357 none
1358 """
1359 if insitu is None: insitu = rcParams['insitu']
[876]1360 self._math._setinsitu(insitu)
[513]1361 varlist = vars()
[1118]1362 s = scantable(self._math._smooth(self, kernel.lower(), width))
[876]1363 s._add_history("smooth", varlist)
1364 print_log()
1365 if insitu: self._assign(s)
1366 else: return s
[513]1367
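    # Illustrative usage sketch (not part of the original file); smoothing a
    # copy of a (hypothetical) scantable with a 10-channel FWHM gaussian:
    #   >>> sm = scan.smooth(kernel='gaussian', width=10, insitu=False)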
[876]1368
[1389]1369 def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
[513]1370 """
1371 Return a scan which has been baselined (all rows) by a polynomial.
1372 Parameters:
[794]1373 mask: an optional mask
1374 order: the order of the polynomial (default is 0)
[1061]1375 plot: plot the fit and the residual. Here each
1376 individual fit has to be approved, by typing 'y'
1377 or 'n'
[1389]1378 uselin: use linear polynomial fit
[794]1379 insitu: if False a new scantable is returned.
1380 Otherwise, the scaling is done in-situ
1381 The default is taken from .asaprc (False)
[513]1382 Example:
1383 # return a scan baselined by a third order polynomial,
1384 # not using a mask
1385 bscan = scan.poly_baseline(order=3)
[579]1386 """
[513]1387 if insitu is None: insitu = rcParams['insitu']
1388 varlist = vars()
1389 if mask is None:
[1295]1390 mask = [True for i in xrange(self.nchan(-1))]
[513]1391 from asap.asapfitter import fitter
[1217]1392 try:
1393 f = fitter()
1394 f.set_scan(self, mask)
[1389]1395 if uselin:
1396 f.set_function(lpoly=order)
1397 else:
1398 f.set_function(poly=order)
[1217]1399 s = f.auto_fit(insitu, plot=plot)
[1446]1400 # Save parameters of baseline fits as a class attribute.
1401 # NOTICE: It does not reflect changes in scantable!
1402 self.blpars = f.blpars
[1217]1403 s._add_history("poly_baseline", varlist)
1404 print_log()
1405 if insitu: self._assign(s)
1406 else: return s
1407 except RuntimeError:
1408 msg = "The fit failed, possibly because it didn't converge."
1409 if rcParams['verbose']:
1410 print msg
1411 return
1412 else:
1413 raise RuntimeError(msg)
[513]1414
[1217]1415
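    # Illustrative usage sketch (not part of the original file); a first-order
    # baseline fitted only outside a (hypothetical) line region:
    #   >>> msk = scan.create_mask([0, 300], [700, 1023])
    #   >>> bscan = scan.poly_baseline(mask=msk, order=1, insitu=False)
    #   >>> scan.blpars    # fit parameters are stored on the input scantable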
[1118]1416 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1417 threshold=3, chan_avg_limit=1, plot=False,
1418 insitu=None):
[880]1419 """
1420 Return a scan which has been baselined (all rows) by a polynomial.
1421 Spectral lines are detected first using linefinder and masked out
1422 to avoid them affecting the baseline solution.
1423
1424 Parameters:
1425 mask: an optional mask retrieved from scantable
1426 edge: an optional number of channel to drop at
1427 the edge of spectrum. If only one value is
1428 specified, the same number will be dropped from
1429 both sides of the spectrum. Default is to keep
[907]1430 all channels. Nested tuples represent individual
[976]1431 edge selection for different IFs (a number of spectral
1432 channels can be different)
[880]1433 order: the order of the polynomial (default is 0)
1434 threshold: the threshold used by line finder. It is better to
1435 keep it large as only strong lines affect the
1436 baseline solution.
[1280]1437 chan_avg_limit:
1438 a maximum number of consecutive spectral channels to
1439 average during the search of weak and broad lines.
1440 The default is no averaging (and no search for weak
1441 lines). If such lines can affect the fitted baseline
1442 (e.g. a high order polynomial is fitted), increase this
1443 parameter (usually values up to 8 are reasonable). Most
1444 users of this method should find the default value
1445 sufficient.
[1061]1446 plot: plot the fit and the residual. Here each
1447 individual fit has to be approved, by typing 'y'
1448 or 'n'
[880]1449 insitu: if False a new scantable is returned.
1450 Otherwise, the scaling is done in-situ
1451 The default is taken from .asaprc (False)
1452
1453 Example:
1454 scan2=scan.auto_poly_baseline(order=7)
1455 """
1456 if insitu is None: insitu = rcParams['insitu']
1457 varlist = vars()
1458 from asap.asapfitter import fitter
1459 from asap.asaplinefind import linefinder
1460 from asap import _is_sequence_or_number as _is_valid
1461
[976]1462 # check whether edge is set up for each IF individually
[1118]1463 individualedge = False;
1464 if len(edge) > 1:
1465 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1466 individualedge = True;
[907]1467
[1118]1468 if not _is_valid(edge, int) and not individualedge:
[909]1469 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1470 pair of integers specified as a tuple. Nested tuples are allowed \
1471 to make individual selection for different IFs."
[919]1472
[1118]1473 curedge = (0, 0)
1474 if individualedge:
1475 for edgepar in edge:
1476 if not _is_valid(edgepar, int):
1477 raise ValueError, "Each element of the 'edge' tuple has \
1478 to be a pair of integers or an integer."
[907]1479 else:
[1118]1480 curedge = edge;
[880]1481
1482 # setup fitter
1483 f = fitter()
1484 f.set_function(poly=order)
1485
1486 # setup line finder
[1118]1487 fl = linefinder()
[1268]1488 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1489
1490 if not insitu:
[1118]1491 workscan = self.copy()
[880]1492 else:
[1118]1493 workscan = self
[880]1494
[907]1495 fl.set_scan(workscan)
1496
[1118]1497 rows = range(workscan.nrow())
[1446]1498 # Save parameters of baseline fits & masklists as a class attribute.
1499 # NOTICE: It does not reflect changes in scantable!
1500 if len(rows) > 0:
1501 self.blpars=[]
1502 self.masklists=[]
[880]1503 asaplog.push("Processing:")
1504 for r in rows:
[1118]1505 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1506 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1507 workscan.getpol(r), workscan.getcycle(r))
[880]1508 asaplog.push(msg, False)
[907]1509
[976]1510 # figure out edge parameter
[1118]1511 if individualedge:
1512 if len(edge) <= workscan.getif(r):
1513 raise RuntimeError, "Number of edge elements appear to " \
1514 "be less than the number of IFs"
1515 curedge = edge[workscan.getif(r)]
[919]1516
[976]1517 # setup line finder
[1118]1518 fl.find_lines(r, mask, curedge)
[1446]1519 outmask=fl.get_mask()
[880]1520 f.set_scan(workscan, fl.get_mask())
1521 f.x = workscan._getabcissa(r)
1522 f.y = workscan._getspectrum(r)
1523 f.data = None
1524 f.fit()
[1446]1525
1526 # Show mask list
1527 masklist=workscan.get_masklist(fl.get_mask(),row=r)
1528 msg = "mask range: "+str(masklist)
1529 asaplog.push(msg, False)
1530
1531 fpar = f.get_parameters()
[1061]1532 if plot:
1533 f.plot(residual=True)
1534 x = raw_input("Accept fit ( [y]/n ): ")
1535 if x.upper() == 'N':
[1446]1536 self.blpars.append(None)
1537 self.masklists.append(None)
[1061]1538 continue
[880]1539 workscan._setspectrum(f.fitter.getresidual(), r)
[1446]1540 self.blpars.append(fpar)
1541 self.masklists.append(masklist)
[1061]1542 if plot:
1543 f._p.unmap()
1544 f._p = None
1545 workscan._add_history("auto_poly_baseline", varlist)
[880]1546 if insitu:
1547 self._assign(workscan)
1548 else:
1549 return workscan
1550
[914]1551 def rotate_linpolphase(self, angle):
1552 """
1553 Rotate the phase of the complex polarization O=Q+iU correlation.
1554 This is always done in situ in the raw data. So if you call this
1555 function more than once then each call rotates the phase further.
1556 Parameters:
1557 angle: The angle (degrees) to rotate (add) by.
1558 Examples:
1559 scan.rotate_linpolphase(2.3)
1560 """
1561 varlist = vars()
[936]1562 self._math._rotate_linpolphase(self, angle)
[914]1563 self._add_history("rotate_linpolphase", varlist)
1564 print_log()
1565 return
[710]1566
[513]1567
[914]1568 def rotate_xyphase(self, angle):
1569 """
1570 Rotate the phase of the XY correlation. This is always done in situ
1571 in the data. So if you call this function more than once
1572 then each call rotates the phase further.
1573 Parameters:
1574 angle: The angle (degrees) to rotate (add) by.
1575 Examples:
1576 scan.rotate_xyphase(2.3)
1577 """
1578 varlist = vars()
[936]1579 self._math._rotate_xyphase(self, angle)
[914]1580 self._add_history("rotate_xyphase", varlist)
1581 print_log()
1582 return
1583
1584 def swap_linears(self):
1585 """
[1348]1586 Swap the linear polarisations XX and YY, or better the first two
1587 polarisations as this also works for circulars.
[914]1588 """
1589 varlist = vars()
[936]1590 self._math._swap_linears(self)
[914]1591 self._add_history("swap_linears", varlist)
1592 print_log()
1593 return
1594
1595 def invert_phase(self):
1596 """
1597 Invert the phase of the complex polarisation
1598 """
1599 varlist = vars()
[936]1600 self._math._invert_phase(self)
[914]1601 self._add_history("invert_phase", varlist)
1602 print_log()
1603 return
1604
[876]1605 def add(self, offset, insitu=None):
[513]1606 """
1607 Return a scan where all spectra have the offset added
1608 Parameters:
1609 offset: the offset
1610 insitu: if False a new scantable is returned.
1611 Otherwise, the scaling is done in-situ
1612 The default is taken from .asaprc (False)
1613 """
1614 if insitu is None: insitu = rcParams['insitu']
[876]1615 self._math._setinsitu(insitu)
[513]1616 varlist = vars()
[876]1617 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1618 s._add_history("add", varlist)
[876]1619 print_log()
1620 if insitu:
1621 self._assign(s)
1622 else:
[513]1623 return s
1624
[1308]1625 def scale(self, factor, tsys=True, insitu=None):
[513]1626 """
1627 Return a scan where all spectra are scaled by the give 'factor'
1628 Parameters:
1629 factor: the scaling factor
1630 insitu: if False a new scantable is returned.
1631 Otherwise, the scaling is done in-situ
1632 The default is taken from .asaprc (False)
1633 tsys: if True (default) then apply the operation to Tsys
1634 as well as the data
1635 """
1636 if insitu is None: insitu = rcParams['insitu']
[876]1637 self._math._setinsitu(insitu)
[513]1638 varlist = vars()
[876]1639 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1640 s._add_history("scale", varlist)
[876]1641 print_log()
1642 if insitu:
1643 self._assign(s)
1644 else:
[513]1645 return s
1646
[1348]1647 def auto_quotient(self, preserve=True, mode='paired'):
[670]1648 """
1649 This function allows you to build quotients automatically.
1650 It assumes the observation to have the same number of
1651 "ons" and "offs"
1652 Parameters:
[710]1653 preserve: you can preserve (default) the continuum or
1654 remove it. The equations used are
[670]1655 preserve: Output = Toff * (on/off) - Toff
[1070]1656 remove: Output = Toff * (on/off) - Ton
[1348]1657 mode: the on/off detection mode
1658 'paired' (default)
1659 identifies 'off' scans by the
1660 trailing '_R' (Mopra/Parkes) or
1661 '_e'/'_w' (Tid) and matches
1662 on/off pairs from the observing pattern
1663 'time'
1664 finds the closest off in time
1665
[670]1666 """
[1348]1667 modes = ["time", "paired"]
[670]1668 if not mode in modes:
[876]1669            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1670 raise ValueError(msg)
1671 varlist = vars()
[1348]1672 s = None
1673 if mode.lower() == "paired":
1674 basesel = self.get_selection()
[1356]1675 sel = selector()+basesel
1676 sel.set_query("SRCTYPE==1")
1677 self.set_selection(sel)
[1348]1678 offs = self.copy()
1679 sel.set_query("SRCTYPE==0")
[1356]1680 self.set_selection(sel)
[1348]1681 ons = self.copy()
1682 s = scantable(self._math._quotient(ons, offs, preserve))
1683 self.set_selection(basesel)
1684 elif mode.lower() == "time":
1685 s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]1686 s._add_history("auto_quotient", varlist)
[876]1687 print_log()
1688 return s
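    # Editorial sketch (not part of the original source): usage of
    # auto_quotient(), assuming a scantable 'scan' that contains matching
    # on/off scans:
    #   q = scan.auto_quotient()                            # 'paired' mode, continuum preserved
    #   q = scan.auto_quotient(preserve=False, mode='time') # closest 'off' in time, continuum removed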
[710]1689
[1145]1690 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1691 """
[1143]1692 Form a quotient using "off" beams when observing in "MX" mode.
1693 Parameters:
[1145]1694 mask: an optional mask to be used when weight == 'stddev'
[1143]1695 weight: How to average the off beams. Default is 'median'.
[1145]1696 preserve: you can preserve (default) the continuum or
1697 remove it. The equations used are
1698 preserve: Output = Toff * (on/off) - Toff
1699 remove: Output = Toff * (on/off) - Ton
[1217]1700 """
[1143]1701 if mask is None: mask = ()
[1141]1702 varlist = vars()
1703 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1704 preoff = scantable(self._math._mx_extract(self, 'off'))
1705 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1706 from asapmath import quotient
[1145]1707 q = quotient(on, off, preserve)
[1143]1708 q._add_history("mx_quotient", varlist)
[1145]1709 print_log()
[1217]1710 return q
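    # Editorial sketch (not part of the original source): usage of
    # mx_quotient(), assuming a scantable 'scan' observed in MX mode;
    # create_mask() is the channel-mask helper defined elsewhere in this class:
    #   q = scan.mx_quotient()                              # median-averaged off beams
    #   msk = scan.create_mask([100, 400])                  # hypothetical channel range
    #   q = scan.mx_quotient(mask=msk, weight='stddev')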
[513]1711
[718]1712 def freq_switch(self, insitu=None):
1713 """
1714 Apply frequency switching to the data.
1715 Parameters:
1716 insitu: if False a new scantable is returned.
1717                        Otherwise, the switching is done in-situ
1718 The default is taken from .asaprc (False)
1719 Example:
1720 none
1721 """
1722 if insitu is None: insitu = rcParams['insitu']
[876]1723 self._math._setinsitu(insitu)
[718]1724 varlist = vars()
[876]1725 s = scantable(self._math._freqswitch(self))
[1118]1726 s._add_history("freq_switch", varlist)
[876]1727 print_log()
1728 if insitu: self._assign(s)
1729 else: return s
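    # Editorial sketch (not part of the original source): the docstring above
    # gives no example; assuming a frequency-switched scantable 'scan':
    #   fs = scan.freq_switch(insitu=False)   # returns a new, switched scantable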
[718]1730
[780]1731 def recalc_azel(self):
1732 """
1733 Recalculate the azimuth and elevation for each position.
1734 Parameters:
1735 none
1736 Example:
1737 """
1738 varlist = vars()
[876]1739 self._recalcazel()
[780]1740 self._add_history("recalc_azel", varlist)
1741 print_log()
1742 return
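    # Editorial sketch (not part of the original source): the Example section
    # above is empty; the call takes no arguments, assuming a scantable 'scan':
    #   scan.recalc_azel()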
1743
[513]1744 def __add__(self, other):
1745 varlist = vars()
1746 s = None
1747 if isinstance(other, scantable):
[1308]1748 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1749 elif isinstance(other, float):
[876]1750 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1751 else:
[718]1752 raise TypeError("Other input is not a scantable or float value")
[513]1753 s._add_history("operator +", varlist)
[718]1754 print_log()
[513]1755 return s
1756
1757 def __sub__(self, other):
1758 """
1759 implicit on all axes and on Tsys
1760 """
1761 varlist = vars()
1762 s = None
1763 if isinstance(other, scantable):
[1308]1764 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1765 elif isinstance(other, float):
[876]1766 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1767 else:
[718]1768 raise TypeError("Other input is not a scantable or float value")
[513]1769 s._add_history("operator -", varlist)
[718]1770 print_log()
[513]1771 return s
[710]1772
[513]1773 def __mul__(self, other):
1774 """
1775 implicit on all axes and on Tsys
1776 """
1777 varlist = vars()
1778 s = None
1779 if isinstance(other, scantable):
[1308]1780 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1781 elif isinstance(other, float):
[876]1782 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1783 else:
[718]1784 raise TypeError("Other input is not a scantable or float value")
[513]1785 s._add_history("operator *", varlist)
[718]1786 print_log()
[513]1787 return s
1788
[710]1789
[513]1790 def __div__(self, other):
1791 """
1792 implicit on all axes and on Tsys
1793 """
1794 varlist = vars()
1795 s = None
1796 if isinstance(other, scantable):
[1308]1797 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1798 elif isinstance(other, float):
1799 if other == 0.0:
[718]1800 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1801 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1802 else:
[718]1803 raise TypeError("Other input is not a scantable or float value")
[513]1804 s._add_history("operator /", varlist)
[718]1805 print_log()
[513]1806 return s
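    # Editorial sketch (not part of the original source): the operators above
    # support scantable/scantable and scantable/float arithmetic, assuming
    # scantables 'on' and 'off':
    #   diff = on - off        # scantable - scantable
    #   scaled = diff * 2.0    # scantable * float
    #   ratio = on / off       # scantable / scantable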
1807
[530]1808 def get_fit(self, row=0):
1809 """
1810 Print or return the stored fits for a row in the scantable
1811 Parameters:
1812            row:    the row to which the fit has been applied.
1813 """
1814        if row >= self.nrow():
1815 return
[976]1816 from asap.asapfit import asapfit
[530]1817 fit = asapfit(self._getfit(row))
[718]1818 if rcParams['verbose']:
[530]1819 print fit
1820 return
1821 else:
1822 return fit.as_dict()
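    # Editorial sketch (not part of the original source): with
    # rcParams['verbose'] == False, get_fit() returns the stored fit as a
    # dictionary, assuming a scantable 'scan' with a fit applied to row 0:
    #   fitdict = scan.get_fit(row=0)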
1823
[484]1824 def _add_history(self, funcname, parameters):
1825 # create date
1826 sep = "##"
1827 from datetime import datetime
1828 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1829 hist = dstr+sep
1830 hist += funcname+sep#cdate+sep
1831 if parameters.has_key('self'): del parameters['self']
[1118]1832 for k, v in parameters.iteritems():
[484]1833 if type(v) is dict:
[1118]1834 for k2, v2 in v.iteritems():
[484]1835 hist += k2
1836 hist += "="
[1118]1837 if isinstance(v2, scantable):
[484]1838 hist += 'scantable'
1839 elif k2 == 'mask':
[1118]1840 if isinstance(v2, list) or isinstance(v2, tuple):
[513]1841 hist += str(self._zip_mask(v2))
1842 else:
1843 hist += str(v2)
[484]1844 else:
[513]1845 hist += str(v2)
[484]1846 else:
1847 hist += k
1848 hist += "="
[1118]1849 if isinstance(v, scantable):
[484]1850 hist += 'scantable'
1851 elif k == 'mask':
[1118]1852 if isinstance(v, list) or isinstance(v, tuple):
[513]1853 hist += str(self._zip_mask(v))
1854 else:
1855 hist += str(v)
[484]1856 else:
1857 hist += str(v)
1858 hist += sep
1859 hist = hist[:-2] # remove trailing '##'
1860 self._addhistory(hist)
1861
[710]1862
[484]1863 def _zip_mask(self, mask):
1864 mask = list(mask)
1865 i = 0
1866 segments = []
1867 while mask[i:].count(1):
1868 i += mask[i:].index(1)
1869 if mask[i:].count(0):
1870 j = i + mask[i:].index(0)
1871 else:
[710]1872 j = len(mask)
[1118]1873 segments.append([i, j])
[710]1874 i = j
[484]1875 return segments
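    # Editorial note (not part of the original source): _zip_mask compresses a
    # boolean channel mask into half-open [start, end) index pairs, e.g. for a
    # hypothetical input:
    #   [0, 1, 1, 0, 1]  ->  [[1, 3], [4, 5]]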
[714]1876
[626]1877 def _get_ordinate_label(self):
1878 fu = "("+self.get_fluxunit()+")"
1879 import re
1880 lbl = "Intensity"
[1118]1881 if re.match(".K.", fu):
[626]1882 lbl = "Brightness Temperature "+ fu
[1118]1883 elif re.match(".Jy.", fu):
[626]1884 lbl = "Flux density "+ fu
1885 return lbl
[710]1886
[876]1887 def _check_ifs(self):
1888 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1889 nchans = filter(lambda t: t > 0, nchans)
[876]1890 return (sum(nchans)/len(nchans) == nchans[0])
[976]1891
1892 def _fill(self, names, unit, average):
1893 import os
1894 from asap._asap import stfiller
1895 first = True
1896 fullnames = []
1897 for name in names:
1898 name = os.path.expandvars(name)
1899 name = os.path.expanduser(name)
1900 if not os.path.exists(name):
1901                msg = "File '%s' does not exist" % (name)
1902 if rcParams['verbose']:
1903 asaplog.push(msg)
1904 print asaplog.pop().strip()
1905 return
1906 raise IOError(msg)
1907 fullnames.append(name)
1908 if average:
1909 asaplog.push('Auto averaging integrations')
[1079]1910 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]1911 for name in fullnames:
[1073]1912 tbl = Scantable(stype)
1913 r = stfiller(tbl)
[976]1914 msg = "Importing %s..." % (name)
[1118]1915 asaplog.push(msg, False)
[976]1916 print_log()
[1118]1917 r._open(name, -1, -1)
[976]1918 r._read()
[1073]1919 #tbl = r._getdata()
[976]1920 if average:
[1118]1921 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]1922 #tbl = tbl2
1923 if not first:
1924 tbl = self._math._merge([self, tbl])
1925 #tbl = tbl2
1926 Scantable.__init__(self, tbl)
1927 r._close()
[1118]1928 del r, tbl
[976]1929 first = False
1930 if unit is not None:
1931 self.set_fluxunit(unit)
[1446]1932 #self.set_freqframe(rcParams['scantable.freqframe'])
[976]1933