source: trunk/python/scantable.py@ 958

Last change on this file since 958 was 946, checked in by mar637, 19 years ago

re-enabled average_pol; made use of selector class

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 52.4 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[714]3from asap import print_log
[946]4from asap import selector
[189]5from numarray import ones,zeros
[102]6import sys
7
[876]8class scantable(Scantable):
[102]9 """
10 The ASAP container for scans
11 """
[710]12
[484]13 def __init__(self, filename, average=None, unit=None):
[102]14 """
15 Create a scantable from a saved one or make a reference
16 Parameters:
[181]17 filename: the name of an asap table on disk
18 or
19 the name of a rpfits/sdfits/ms file
20 (integrations within scans are auto averaged
21 and the whole file is read)
22 or
23 [advanced] a reference to an existing
[102]24 scantable
[484]25 average: average all integrations within a scan on read.
26 The default (True) is taken from .asaprc.
27 unit: brightness unit; must be consistent with K or Jy.
[340]28 Over-rides the default selected by the reader
29 (input rpfits/sdfits/ms) or replaces the value
30 in existing scantables
[710]31 """
[489]32 if average is None or type(average) is not bool:
[710]33 average = rcParams['scantable.autoaverage']
[489]34
[484]35 varlist = vars()
[876]36 from asap._asap import stmath
37 self._math = stmath()
[710]38 from asap import asaplog
[876]39 if isinstance(filename, Scantable):
40 Scantable.__init__(self, filename)
[340]41 if unit is not None:
[710]42 self.set_fluxunit(unit)
[181]43 else:
[411]44 import os.path
45 if not os.path.exists(filename):
[710]46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
[411]52 filename = os.path.expandvars(filename)
53 if os.path.isdir(filename):
[181]54 # crude check if asap table
[411]55 if os.path.exists(filename+'/table.info'):
[876]56 Scantable.__init__(self, filename, "memory")
[340]57 if unit is not None:
[710]58 self.set_fluxunit(unit)
[181]59 else:
[718]60 msg = "The given file '%s' is not a valid asap table." % (filename)
61 if rcParams['verbose']:
62 print msg
63 return
64 else:
65 raise IOError(msg)
[710]66 else:
[876]67 from asap._asap import stfiller
[340]68 ifSel = -1
69 beamSel = -1
[876]70 r = stfiller()
[710]71 r._open(filename,ifSel,beamSel)
72 asaplog.push('Importing data...')
[762]73 print_log()
[876]74 r._read()
[411]75 tbl = r._getdata()
[346]76 if unit is not None:
77 tbl.set_fluxunit(unit)
[489]78 if average:
[710]79 asaplog.push('Auto averaging integrations...')
[762]80 print_log()
[876]81 tbl2 = self._math._average((tbl,),(),'NONE','SCAN', False)
82 Scantable.__init__(self,tbl2)
[513]83 del tbl2
[226]84 else:
[876]85 Scantable.__init__(self,tbl)
[513]86 del r,tbl
[484]87 self._add_history("scantable", varlist)
[714]88 print_log()
[102]89
[876]90 def save(self, name=None, format=None, overwrite=False):
[116]91 """
[710]92 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
[280]93 Image FITS or MS2 format.
[116]94 Parameters:
[196]95 name: the name of the output file. For format="FITS" this
[489]96 is the name of the directory into which all the files
[497]97 will be written (default is 'asap_FITS'). For format
98 "ASCII" this is the root file name (data in 'name'.txt
99 and header in 'name'_header.txt)
[116]100 format: an optional file format. Default is ASAP.
[280]101 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]102 'SDFITS' (save as SDFITS file)
[710]103 'FITS' (saves each row as a FITS Image)
[200]104 'ASCII' (saves as ascii text file)
[226]105 'MS2' (saves as an aips++
106 MeasurementSet V2)
[411]107 overwrite: whether to overwrite the file if it already exists.
[256]108 The default (False) is to return with a warning
[411]109 without writing the output. USE WITH CARE.
[116]110 Example:
111 scan.save('myscan.asap')
112 scan.save('myscan.sdfits','SDFITS')
113 """
[411]114 from os import path
[226]115 if format is None: format = rcParams['scantable.save']
[256]116 suffix = '.'+format.lower()
117 if name is None or name =="":
118 name = 'scantable'+suffix
[718]119 from asap import asaplog
120 msg = "No filename given. Using default name %s..." % name
121 asaplog.push(msg)
[411]122 name = path.expandvars(name)
[256]123 if path.isfile(name) or path.isdir(name):
124 if not overwrite:
[718]125 msg = "File %s exists." % name
126 if rcParams['verbose']:
127 print msg
128 return
129 else:
130 raise IOError(msg)
[451]131 format2 = format.upper()
132 if format2 == 'ASAP':
[116]133 self._save(name)
134 else:
[876]135 print "NYI"
136# from asap._asap import sdwriter as _sw
137# w = _sw(format2)
138# w.write(self, name, stokes)
[718]139
140 print_log()
[116]141 return
142
[102]143 def copy(self):
144 """
145 Return a copy of this scantable.
146 Parameters:
[113]147 none
[102]148 Example:
149 copiedscan = scan.copy()
150 """
[876]151 sd = scantable(Scantable._copy(self))
[113]152 return sd
153
[102]154 def get_scan(self, scanid=None):
155 """
156 Return a specific scan (by scanno) or collection of scans (by
157 source name) in a new scantable.
158 Parameters:
[513]159 scanid: a (list of) scanno or a source name; unix-style
 160 patterns are accepted for source name matching, e.g.
 161 '*_R' gets all 'ref' scans
[102]162 Example:
[513]163 # get all scans containing the source '323p459'
164 newscan = scan.get_scan('323p459')
165 # get all 'off' scans
166 refscans = scan.get_scan('*_R')
 167 # get a subset of scans by scanno (as listed in scan.summary())
168 newscan = scan.get_scan([0,2,7,10])
[102]169 """
170 if scanid is None:
[718]171 if rcParams['verbose']:
172 print "Please specify a scan no or name to retrieve from the scantable"
173 return
174 else:
175 raise RuntimeError("No scan given")
176
[102]177 try:
[946]178 bsel = self.get_selection()
179 sel = selector()
[102]180 if type(scanid) is str:
[946]181 sel.set_name(scanid)
182 self.set_selection(bsel+sel)
[876]183 scopy = self._copy()
[946]184 self.set_selection(bsel)
[876]185 return scantable(scopy)
[102]186 elif type(scanid) is int:
[946]187 sel.set_scans([scanid])
188 self.set_selection(bsel+sel)
[876]189 scopy = self._copy()
[946]190 self.set_selection(bsel)
[876]191 return scantable(scopy)
[381]192 elif type(scanid) is list:
[946]193 sel.set_scans(scanid)
[946]194 self.set_selection(bsel+sel)
[876]195 scopy = self._copy()
[946]196 self.set_selection(bsel)
[876]197 return scantable(scopy)
[381]198 else:
[718]199 msg = "Illegal scanid type, use 'int' or 'list' of ints."
200 if rcParams['verbose']:
201 print msg
202 else:
203 raise TypeError(msg)
[102]204 except RuntimeError:
[718]205 if rcParams['verbose']: print "Couldn't find any match."
206 else: raise
[102]207
208 def __str__(self):
[876]209 return Scantable._summary(self,True)
[102]210
[876]211 def summary(self, filename=None, verbose=None):
[102]212 """
213 Print a summary of the contents of this scantable.
214 Parameters:
 215 filename: the name of a file to write the output to
216 Default - no file output
[381]217 verbose: print extra info such as the frequency table
218 The default (False) is taken from .asaprc
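             Example (the file name is arbitrary):
                 scan.summary(filename='summary.txt', verbose=True)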
[102]219 """
[876]220 if verbose is None: verbose = rcParams['scantable.verbosesummary']
[381]221 info = Scantable._summary(self, verbose)
[102]222 if filename is not None:
[256]223 if filename == "":
224 filename = 'scantable_summary.txt'
[415]225 from os.path import expandvars, isdir
[411]226 filename = expandvars(filename)
[415]227 if not isdir(filename):
[413]228 data = open(filename, 'w')
229 data.write(info)
230 data.close()
231 else:
[718]232 msg = "Illegal file name '%s'." % (filename)
233 if rcParams['verbose']:
234 print msg
235 else:
236 raise IOError(msg)
237 if rcParams['verbose']:
[794]238 try:
239 from IPython.genutils import page as pager
240 except ImportError:
241 from pydoc import pager
242 pager(info)
[718]243 else:
244 return info
[710]245
[946]246
247 def get_selection(self):
 248 """
             Get the current selection applied to this scantable.
             Returns:
                 a selector object
 249 """
250 return selector(self._getselection())
251
252 def set_selection(self, selection):
 253 """
             Select a subset of the data. All subsequent operations on this
             scantable are restricted to the selection.
             Parameters:
                 selection: a selector object
             Example:
                 sel = selector()
                 sel.set_name("ORION*")
                 scan.set_selection(sel)
 254 """
255 self._setselection(selection)
256
[553]257 def set_cursor(self, beam=0, IF=0, pol=0):
[102]258 """
259 Set the spectrum for individual operations.
260 Parameters:
[553]261 beam, IF, pol: a number
[102]262 Example:
[256]263 scan.set_cursor(0,0,1)
[135]264 pol1sig = scan.stats(all=False) # returns std dev for beam=0
[181]265 # if=0, pol=1
[102]266 """
[876]267 print "DEPRECATED"
[484]268 varlist = vars()
[876]269 sel = asap._asap.Selector()
270 sel._setbeams([beam])
271 sel._setpols([pol])
272 sel._setifs([IF])
273 self._add_history("set_cursor", varlist)
[102]274 return
275
[256]276 def get_cursor(self):
[113]277 """
 278 Return/print the current 'cursor' into the Beam/IF/Pol cube.
279 Parameters:
280 none
281 Returns:
282 a list of values (currentBeam,currentIF,currentPol)
283 Example:
284 none
285 """
[876]286 print "DEPRECATED"
287 sel = self._getselection()
288 i = sel.getbeams()[0]
289 j = sel.getifs()[0]
290 k = sel.getpols()[0]
[721]291 from asap import asaplog
292 out = "--------------------------------------------------\n"
293 out += " Cursor position\n"
294 out += "--------------------------------------------------\n"
295 out += 'Beam=%d IF=%d Pol=%d ' % (i,j,k)
296 asaplog.push(out)
297 print_log()
298 return i,j,k
[102]299
[876]300 def stats(self, stat='stddev', mask=None):
[102]301 """
[135]302 Determine the specified statistic of the current beam/if/pol.
[102]303 Takes a 'mask' as an optional parameter to specify which
304 channels should be excluded.
305 Parameters:
[135]306 stat: 'min', 'max', 'sumsq', 'sum', 'mean',
307 'var', 'stddev', 'avdev', 'rms', 'median'
308 mask: an optional mask specifying where the statistic
[102]309 should be determined.
310 Example:
[113]311 scan.set_unit('channel')
[102]312 msk = scan.create_mask([100,200],[500,600])
[135]313 scan.stats(stat='mean', mask=msk)
[102]314 """
[256]315 from numarray import array,zeros,Float
[102]316 if mask == None:
[876]317 mask = []
[256]318 axes = ['Beam','IF','Pol','Time']
[876]319 if not self._check_ifs():
 320 raise ValueError("Cannot apply mask as the IFs have different number of channels. "
 321 "Please use set_selection() to select individual IFs")
[256]322
[876]323 statvals = self._math._stats(self, mask, stat)
324 out = ''
325 axes = []
326 for i in range(self.nrow()):
327 axis = []
328 axis.append(self.getscan(i))
329 axis.append(self.getbeam(i))
330 axis.append(self.getif(i))
331 axis.append(self.getpol(i))
332 axis.append(self.getcycle(i))
333 axes.append(axis)
334 tm = self._gettime(i)
335 src = self._getsourcename(i)
336 out += 'Scan[%d] (%s) ' % (axis[0], src)
337 out += 'Time[%s]:\n' % (tm)
338 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
339 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
340 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
341 out += '= %3.3f\n' % (statvals[i])
342 out += "--------------------------------------------------\n"
[256]343
[876]344 if rcParams['verbose']:
345 print "--------------------------------------------------"
346 print " ",stat
347 print "--------------------------------------------------"
348 print out
349 retval = { 'axesnames': ['scanno','beamno','ifno','polno','cycleno'],
350 'axes' : axes,
351 'data': statvals}
352 return retval
[102]353
[876]354 def stddev(self,mask=None):
[135]355 """
 356 Determine the standard deviation of the current beam/if/pol.
357 Takes a 'mask' as an optional parameter to specify which
358 channels should be excluded.
359 Parameters:
360 mask: an optional mask specifying where the standard
361 deviation should be determined.
362
363 Example:
364 scan.set_unit('channel')
365 msk = scan.create_mask([100,200],[500,600])
 366 scan.stddev(mask=msk)
367 """
[876]368 return self.stats(stat='stddev',mask=mask);
[135]369
[876]370 def get_tsys(self):
[113]371 """
372 Return the System temperatures.
373 Parameters:
[876]374
[113]375 Returns:
[876]376 a list of Tsys values for the current selection
[113]377 """
[256]378
[876]379 return self._row_callback(self._gettsys, "Tsys")
[256]380
[876]381 def _row_callback(self, callback, label):
382 axes = []
383 axesnames = ['scanno','beamno','ifno','polno','cycleno']
384 out = ""
385 outvec =[]
386 for i in range(self.nrow()):
387 axis = []
388 axis.append(self.getscan(i))
389 axis.append(self.getbeam(i))
390 axis.append(self.getif(i))
391 axis.append(self.getpol(i))
392 axis.append(self.getcycle(i))
393 axes.append(axis)
394 tm = self._gettime(i)
395 src = self._getsourcename(i)
396 out += 'Scan[%d] (%s) ' % (axis[0], src)
397 out += 'Time[%s]:\n' % (tm)
398 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
399 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
400 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
401 outvec.append(callback(i))
402 out += '= %3.3f\n' % (outvec[i])
403 out += "--------------------------------------------------\n"
404 if rcParams['verbose']:
405 print "--------------------------------------------------"
406 print " %s" % (label)
407 print "--------------------------------------------------"
408 print out
409 retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
410 return retval
[256]411
412
[407]413 def get_time(self, row=-1):
[113]414 """
415 Get a list of time stamps for the observations.
[181]416 Return a string for each integration in the scantable.
[113]417 Parameters:
[407]418 row: row no of integration. Default -1 return all rows
[113]419 Example:
420 none
421 """
422 out = []
[407]423 if row == -1:
424 for i in range(self.nrow()):
425 out.append(self._gettime(i))
426 return out
427 else:
428 if row < self.nrow():
429 return self._gettime(row)
[102]430
[714]431 def get_sourcename(self, row=-1):
432 """
[794]433 Get a list of source names for the observations.
[714]434 Return a string for each integration in the scantable.
435 Parameters:
436 row: row no of integration. Default -1 return all rows
437 Example:
438 none
439 """
440 out = []
441 if row == -1:
442 return [self._getsourcename(i) for i in range(self.nrow())]
443 else:
444 if 0 <= row < self.nrow():
445 return self._getsourcename(row)
446
[794]447 def get_elevation(self, row=-1):
448 """
449 Get a list of elevations for the observations.
450 Return a float for each integration in the scantable.
451 Parameters:
452 row: row no of integration. Default -1 return all rows
453 Example:
454 none
455 """
456 out = []
457 if row == -1:
458 return [self._getelevation(i) for i in range(self.nrow())]
459 else:
460 if 0 <= row < self.nrow():
461 return self._getelevation(row)
462
463 def get_azimuth(self, row=-1):
464 """
465 Get a list of azimuths for the observations.
466 Return a float for each integration in the scantable.
467 Parameters:
468 row: row no of integration. Default -1 return all rows
469 Example:
470 none
471 """
472 out = []
473 if row == -1:
474 return [self._getazimuth(i) for i in range(self.nrow())]
475 else:
476 if 0 <= row < self.nrow():
477 return self._getazimuth(row)
478
479 def get_parangle(self, row=-1):
480 """
481 Get a list of parallactic angles for the observations.
482 Return a float for each integration in the scantable.
483 Parameters:
484 row: row no of integration. Default -1 return all rows
485 Example:
486 none
487 """
488 out = []
489 if row == -1:
490 return [self._getparangle(i) for i in range(self.nrow())]
491 else:
492 if 0 <= row < self.nrow():
493 return self._getparangle(row)
494
[102]495 def set_unit(self, unit='channel'):
496 """
497 Set the unit for all following operations on this scantable
498 Parameters:
499 unit: optional unit, default is 'channel'
[113]500 one of '*Hz','km/s','channel', ''
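             Example:
                 scan.set_unit('km/s')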
[102]501 """
[484]502 varlist = vars()
[113]503 if unit in ['','pixel', 'channel']:
504 unit = ''
505 inf = list(self._getcoordinfo())
506 inf[0] = unit
507 self._setcoordinfo(inf)
[484]508 self._add_history("set_unit",varlist)
[113]509
[484]510 def set_instrument(self, instr):
[358]511 """
512 Set the instrument for subsequent processing
513 Parameters:
[710]514 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]515 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
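             Example:
                 scan.set_instrument('ATMOPRA')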
[358]516 """
517 self._setInstrument(instr)
 518 self._add_history("set_instrument", vars())
[718]519 print_log()
[358]520
[276]521 def set_doppler(self, doppler='RADIO'):
522 """
523 Set the doppler for all following operations on this scantable.
524 Parameters:
525 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
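             Example:
                 scan.set_doppler('OPTICAL')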
526 """
[484]527 varlist = vars()
[276]528 inf = list(self._getcoordinfo())
529 inf[2] = doppler
530 self._setcoordinfo(inf)
[484]531 self._add_history("set_doppler",vars())
[718]532 print_log()
[710]533
[226]534 def set_freqframe(self, frame=None):
[113]535 """
536 Set the frame type of the Spectral Axis.
537 Parameters:
[591]538 frame: an optional frame type, default 'LSRK'. Valid frames are:
539 'REST','TOPO','LSRD','LSRK','BARY',
540 'GEO','GALACTO','LGROUP','CMB'
[113]541 Examples:
542 scan.set_freqframe('BARY')
543 """
[484]544 if frame is None: frame = rcParams['scantable.freqframe']
545 varlist = vars()
[113]546 valid = ['REST','TOPO','LSRD','LSRK','BARY', \
547 'GEO','GALACTO','LGROUP','CMB']
[591]548
[876]549 if frame in valid:
[113]550 inf = list(self._getcoordinfo())
551 inf[1] = frame
552 self._setcoordinfo(inf)
[484]553 self._add_history("set_freqframe",varlist)
[102]554 else:
[718]555 msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
556 if rcParams['verbose']:
557 print msg
558 else:
559 raise TypeError(msg)
560 print_log()
[710]561
[113]562 def get_unit(self):
563 """
564 Get the default unit set in this scantable
565 Parameters:
566 Returns:
567 A unit string
568 """
569 inf = self._getcoordinfo()
570 unit = inf[0]
571 if unit == '': unit = 'channel'
572 return unit
[102]573
[158]574 def get_abcissa(self, rowno=0):
[102]575 """
[158]576 Get the abcissa in the current coordinate setup for the currently
[113]577 selected Beam/IF/Pol
578 Parameters:
[226]579 rowno: an optional row number in the scantable. Default is the
580 first row, i.e. rowno=0
[113]581 Returns:
[407]582 The abcissa values and their format string (returned as a tuple)
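             Example:
                 abc, lbl = scan.get_abcissa(rowno=0)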
[113]583 """
[256]584 abc = self._getabcissa(rowno)
[710]585 lbl = self._getabcissalabel(rowno)
[718]586 print_log()
[158]587 return abc, lbl
[113]588
589 def create_mask(self, *args, **kwargs):
590 """
[102]591 Compute and return a mask based on [min,max] windows.
[189]592 The specified windows are to be INCLUDED, when the mask is
[113]593 applied.
[102]594 Parameters:
595 [min,max],[min2,max2],...
596 Pairs of start/end points specifying the regions
597 to be masked
[189]598 invert: optional argument. If specified as True,
599 return an inverted mask, i.e. the regions
600 specified are EXCLUDED
[513]601 row: create the mask using the specified row for
 602 unit conversions, default is row=0.
 603 Only necessary if frequency varies over rows.
[102]604 Example:
[113]605 scan.set_unit('channel')
606
607 a)
[102]608 msk = scan.set_mask([400,500],[800,900])
[189]609 # masks everything outside 400 and 500
[113]610 # and 800 and 900 in the unit 'channel'
611
612 b)
613 msk = scan.set_mask([400,500],[800,900], invert=True)
[189]614 # masks the regions between 400 and 500
[113]615 # and 800 and 900 in the unit 'channel'
[710]616
[102]617 """
[513]618 row = 0
619 if kwargs.has_key("row"):
620 row = kwargs.get("row")
621 data = self._getabcissa(row)
[113]622 u = self._getcoordinfo()[0]
[718]623 if rcParams['verbose']:
[113]624 if u == "": u = "channel"
[718]625 from asap import asaplog
626 msg = "The current mask window unit is %s" % u
[914]627 if not self._check_ifs():
[876]628 msg += "\nThis mask is only valid for IF=%d" % (self.getif(row))
[718]629 asaplog.push(msg)
[102]630 n = self.nchan()
[189]631 msk = zeros(n)
[710]632 # test if args is a 'list' or a 'normal *args - UGLY!!!
633
634 ws = (isinstance(args[-1][-1],int) or isinstance(args[-1][-1],float)) and args or args[0]
635 for window in ws:
[102]636 if (len(window) != 2 or window[0] > window[1] ):
[718]637 raise TypeError("A window needs to be defined as [min,max]")
[102]638 for i in range(n):
[113]639 if data[i] >= window[0] and data[i] < window[1]:
[189]640 msk[i] = 1
[113]641 if kwargs.has_key('invert'):
642 if kwargs.get('invert'):
643 from numarray import logical_not
[710]644 msk = logical_not(msk)
[718]645 print_log()
[102]646 return msk
[710]647
[256]648 def get_restfreqs(self):
649 """
650 Get the restfrequency(s) stored in this scantable.
651 The return value(s) are always of unit 'Hz'
652 Parameters:
653 none
654 Returns:
655 a list of doubles
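             Example:
                 restfreqs = scan.get_restfreqs()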
656 """
657 return list(self._getrestfreqs())
[102]658
[402]659
[931]660 def set_restfreqs(self, freqs=None, unit='Hz'):
661 """
 662 Set or replace the restfrequency.
663 If the 'freqs' argument holds a scalar,
664 then that rest frequency will be applied to all the selected
665 data. If the 'freqs' argument holds
666 a vector, then it MUST be of equal or smaller length than
667 the number of IFs (and the available restfrequencies will be
668 replaced by this vector). In this case, *all* data have
669 the restfrequency set per IF according
670 to the corresponding value you give in the 'freqs' vector.
671 E.g. 'freqs=[1e9,2e9]' would mean IF 0 gets restfreq 1e9 and
672 IF 1 gets restfreq 2e9.
673 You can also specify the frequencies via known line names
674 from the built-in Lovas table.
675 Parameters:
 676 freqs: list of rest frequency values or string identifiers
677 unit: unit for rest frequency (default 'Hz')
[402]678
[931]679 Example:
680 # set the given restfrequency for the whole table
681 scan.set_restfreqs(freqs=1.4e9)
 682 # If the number of IFs in the data is >= 2 then IF0 gets the first
683 # value IF1 the second...
684 scan.set_restfreqs(freqs=[1.4e9,1.67e9])
685 #set the given restfrequency for the whole table (by name)
686 scan.set_restfreqs(freqs="OH1667")
[391]687
[931]688 Note:
 689 To do more sophisticated rest frequency setting, e.g. on a
690 source and IF basis, use scantable.set_selection() before using
691 this function.
 692 # provided your scantable is called scan
693 selection = selector()
694 selection.set_name("ORION*")
695 selection.set_ifs([1])
696 scan.set_selection(selection)
697 scan.set_restfreqs(freqs=86.6e9)
698
699 """
700 varlist = vars()
701
702 t = type(freqs)
703 if isinstance(freqs, int) or isinstance(freqs,float):
704 self._setrestfreqs(freqs, unit)
705 elif isinstance(freqs, list) or isinstance(freqs,tuple):
706 if isinstance(freqs[-1], int) or isinstance(freqs[-1],float):
707 sel = selector()
708 savesel = self._getselection()
709 for i in xrange(len(freqs)):
[946]710 sel.set_ifs([i])
[931]711 self._setselection(sel)
712 self._setrestfreqs(freqs[i], unit)
713 self._setselection(savesel)
714 elif isinstance(freqs[-1], str):
715 # not yet implemented
716 pass
717 else:
718 return
719 self._add_history("set_restfreqs", varlist)
720
721
722
[484]723 def history(self):
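        """
        Print the processing history of this scantable (date, function
        and parameters of each recorded operation) through a pager.
        """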
724 hist = list(self._gethistory())
[794]725 out = "-"*80
[484]726 for h in hist:
[489]727 if h.startswith("---"):
[794]728 out += "\n"+h
[489]729 else:
730 items = h.split("##")
731 date = items[0]
732 func = items[1]
733 items = items[2:]
[794]734 out += "\n"+date+"\n"
735 out += "Function: %s\n Parameters:" % (func)
[489]736 for i in items:
737 s = i.split("=")
[794]738 out += "\n %s = %s" % (s[0],s[1])
739 out += "\n"+"-"*80
740 try:
741 from IPython.genutils import page as pager
742 except ImportError:
743 from pydoc import pager
744 pager(out)
[484]745 return
746
[513]747 #
748 # Maths business
749 #
750
[931]751 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[513]752 """
[513]753 Return the (time) average of a scan.
754 Note:
755 in channels only
756 The cursor of the output scan is set to 0.
757 Parameters:
758 one scan or comma separated scans
759 mask: an optional mask (only used for 'var' and 'tsys'
760 weighting)
[558]761 scanav: True averages each scan separately
762 False (default) averages all scans together,
[524]763 weight: Weighting scheme. 'none', 'var' (1/var(spec)
[535]764 weighted), 'tsys' (1/Tsys**2 weighted), 'tint'
765 (integration time weighted) or 'tintsys' (Tint/Tsys**2).
766 The default is 'tint'
[931]767 align: align the spectra in velocity before averaging. It takes
768 the time of the first spectrum as reference time.
[513]769 Example:
770 # time average the scantable without using a mask
[710]771 newscan = scan.average_time()
[513]772 """
773 varlist = vars()
[524]774 if weight is None: weight = 'tint'
[513]775 if mask is None: mask = ()
[876]776 if scanav:
777 scanav = "SCAN"
778 else:
779 scanav = "NONE"
[931]780 s = scantable(self._math._average((self,), mask, weight.upper(),
781 scanav, align))
[513]782 s._add_history("average_time",varlist)
[718]783 print_log()
[513]784 return s
[710]785
[876]786 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]787 """
788 Return a scan where all spectra are converted to either
789 Jansky or Kelvin depending upon the flux units of the scan table.
790 By default the function tries to look the values up internally.
791 If it can't find them (or if you want to over-ride), you must
792 specify EITHER jyperk OR eta (and D which it will try to look up
793 also if you don't set it). jyperk takes precedence if you set both.
794 Parameters:
795 jyperk: the Jy / K conversion factor
796 eta: the aperture efficiency
 797 d: the geometric diameter (metres)
798 insitu: if False a new scantable is returned.
799 Otherwise, the scaling is done in-situ
800 The default is taken from .asaprc (False)
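             Example (the conversion factor here is illustrative only):
                 scan.convert_flux(jyperk=12.7, insitu=False)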
804 """
805 if insitu is None: insitu = rcParams['insitu']
[876]806 self._math._setinsitu(insitu)
[513]807 varlist = vars()
808 if jyperk is None: jyperk = -1.0
809 if d is None: d = -1.0
810 if eta is None: eta = -1.0
[876]811 s = scantable(self._math._convertflux(self, d, eta, jyperk))
812 s._add_history("convert_flux", varlist)
813 print_log()
814 if insitu: self._assign(s)
815 else: return s
[513]816
[876]817 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]818 """
819 Return a scan after applying a gain-elevation correction.
820 The correction can be made via either a polynomial or a
821 table-based interpolation (and extrapolation if necessary).
822 You specify polynomial coefficients, an ascii table or neither.
823 If you specify neither, then a polynomial correction will be made
824 with built in coefficients known for certain telescopes (an error
825 will occur if the instrument is not known).
826 The data and Tsys are *divided* by the scaling factors.
827 Parameters:
828 poly: Polynomial coefficients (default None) to compute a
829 gain-elevation correction as a function of
830 elevation (in degrees).
831 filename: The name of an ascii file holding correction factors.
832 The first row of the ascii file must give the column
833 names and these MUST include columns
834 "ELEVATION" (degrees) and "FACTOR" (multiply data
835 by this) somewhere.
836 The second row must give the data type of the
837 column. Use 'R' for Real and 'I' for Integer.
838 An example file would be
839 (actual factors are arbitrary) :
840
841 TIME ELEVATION FACTOR
842 R R R
843 0.1 0 0.8
844 0.2 20 0.85
845 0.3 40 0.9
846 0.4 60 0.85
847 0.5 80 0.8
848 0.6 90 0.75
849 method: Interpolation method when correcting from a table.
850 Values are "nearest", "linear" (default), "cubic"
851 and "spline"
852 insitu: if False a new scantable is returned.
853 Otherwise, the scaling is done in-situ
854 The default is taken from .asaprc (False)
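             Example (the file name is arbitrary):
                 scan.gain_el(filename='gainel.txt', method='linear', insitu=False)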
855 """
856
857 if insitu is None: insitu = rcParams['insitu']
[876]858 self._math._setinsitu(insitu)
[513]859 varlist = vars()
860 if poly is None:
861 poly = ()
862 from os.path import expandvars
863 filename = expandvars(filename)
[876]864 s = scantable(self._math._gainel(self, poly, filename, method))
865 s._add_history("gain_el", varlist)
866 print_log()
867 if insitu: self._assign(s)
868 else: return s
[710]869
[931]870 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]871 """
872 Return a scan where all rows have been aligned in frequency/velocity.
873 The alignment frequency frame (e.g. LSRK) is that set by function
874 set_freqframe.
875 Parameters:
876 reftime: reference time to align at. By default, the time of
877 the first row of data is used.
878 method: Interpolation method for regridding the spectra.
879 Choose from "nearest", "linear", "cubic" (default)
880 and "spline"
881 insitu: if False a new scantable is returned.
882 Otherwise, the scaling is done in-situ
883 The default is taken from .asaprc (False)
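             Example:
                 aligned = scan.freq_align(method='linear', insitu=False)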
884 """
[931]885 if insitu is None: insitu = rcParams["insitu"]
[876]886 self._math._setinsitu(insitu)
[513]887 varlist = vars()
[931]888 if reftime is None: reftime = ""
889 s = scantable(self._math._freq_align(self, reftime, method))
[876]890 s._add_history("freq_align", varlist)
891 print_log()
892 if insitu: self._assign(s)
893 else: return s
[513]894
[876]895 def opacity(self, tau, insitu=None):
[513]896 """
897 Apply an opacity correction. The data
898 and Tsys are multiplied by the correction factor.
899 Parameters:
900 tau: Opacity from which the correction factor is
901 exp(tau*ZD)
902 where ZD is the zenith-distance
903 insitu: if False a new scantable is returned.
904 Otherwise, the scaling is done in-situ
905 The default is taken from .asaprc (False)
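             Example (the opacity value is illustrative only):
                 scan.opacity(tau=0.09, insitu=True)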
906 """
907 if insitu is None: insitu = rcParams['insitu']
[876]908 self._math._setinsitu(insitu)
[513]909 varlist = vars()
[876]910 s = scantable(self._math._opacity(self, tau))
911 s._add_history("opacity", varlist)
912 print_log()
913 if insitu: self._assign(s)
914 else: return s
[513]915
916 def bin(self, width=5, insitu=None):
917 """
 918 Return a scan where all spectra have been binned up.
         Parameters:
 919 width: The bin width (default=5) in pixels
920 insitu: if False a new scantable is returned.
921 Otherwise, the scaling is done in-situ
922 The default is taken from .asaprc (False)
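             Example:
                 binned = scan.bin(width=10, insitu=False)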
923 """
924 if insitu is None: insitu = rcParams['insitu']
[876]925 self._math._setinsitu(insitu)
[513]926 varlist = vars()
[876]927 s = scantable(self._math._bin(self, width))
928 s._add_history("bin",varlist)
929 print_log()
930 if insitu: self._assign(s)
931 else: return s
[513]932
[710]933
[513]934 def resample(self, width=5, method='cubic', insitu=None):
935 """
 936 Return a scan where all spectra have been resampled.
         Parameters:
 937 width: The bin width (default=5) in pixels
 938 method: Interpolation method for the resampling. Choose from
 939 "nearest", "linear", "cubic" (default)
 940 and "spline"
941 insitu: if False a new scantable is returned.
942 Otherwise, the scaling is done in-situ
943 The default is taken from .asaprc (False)
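             Example:
                 rs = scan.resample(width=4, method='linear', insitu=False)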
944 """
945 if insitu is None: insitu = rcParams['insitu']
[876]946 self._math._setinsitu(insitu)
[513]947 varlist = vars()
[876]948 s = scantable(self._math._resample(self, method, width))
949 s._add_history("resample",varlist)
950 print_log()
951 if insitu: self._assign(s)
952 else: return s
[513]953
954
[946]955 def average_pol(self, mask=None, weight='none'):
956 """
957 Average the Polarisations together.
958 Parameters:
[513]959 mask: An optional mask defining the region where the
960 averaging will be applied. The output will have all
961 specified points masked.
962 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
963 weighted), or 'tsys' (1/Tsys**2 weighted)
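             Example:
                 averaged = scan.average_pol(weight='tsys')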
964 """
965 varlist = vars()
966 if mask is None:
967 mask = ()
968 s = self._math._averagepol(self, mask, weight)
969 s._add_history("average_pol",varlist)
970 print_log()
971 return scantable(s)
[513]972
[876]973 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]974 """
975 Smooth the spectrum by the specified kernel (conserving flux).
976 Parameters:
977 scan: The input scan
978 kernel: The type of smoothing kernel. Select from
979 'hanning' (default), 'gaussian' and 'boxcar'.
980 The first three characters are sufficient.
981 width: The width of the kernel in pixels. For hanning this is
 982 ignored, otherwise it defaults to 5 pixels.
983 For 'gaussian' it is the Full Width Half
984 Maximum. For 'boxcar' it is the full width.
985 insitu: if False a new scantable is returned.
986 Otherwise, the scaling is done in-situ
987 The default is taken from .asaprc (False)
988 Example:
989 none
990 """
991 if insitu is None: insitu = rcParams['insitu']
[876]992 self._math._setinsitu(insitu)
[513]993 varlist = vars()
[876]994 s = scantable(self._math._smooth(self,kernel,width))
995 s._add_history("smooth", varlist)
996 print_log()
997 if insitu: self._assign(s)
998 else: return s
[513]999
[876]1000
1001 def poly_baseline(self, mask=None, order=0, insitu=None):
[513]1002 """
1003 Return a scan which has been baselined (all rows) by a polynomial.
1004 Parameters:
[794]1005 scan: a scantable
1006 mask: an optional mask
1007 order: the order of the polynomial (default is 0)
1008 insitu: if False a new scantable is returned.
1009 Otherwise, the scaling is done in-situ
1010 The default is taken from .asaprc (False)
[513]1014 Example:
1015 # return a scan baselined by a third order polynomial,
1016 # not using a mask
1017 bscan = scan.poly_baseline(order=3)
[579]1018 """
[513]1019 if insitu is None: insitu = rcParams['insitu']
1020 varlist = vars()
1021 if mask is None:
1022 from numarray import ones
[876]1023 mask = list(ones(self.nchan(-1)))
[513]1024 from asap.asapfitter import fitter
1025 f = fitter()
1026 f.set_scan(self, mask)
1027 f.set_function(poly=order)
[876]1028 s = f.auto_fit(insitu)
1029 s._add_history("poly_baseline", varlist)
[718]1030 print_log()
[876]1031 if insitu: self._assign(s)
1032 else: return s
[513]1033
[931]1034 def auto_poly_baseline(self, mask=[], edge=(0,0), order=0,
[880]1035 threshold=3, insitu=None):
1036 """
1037 Return a scan which has been baselined (all rows) by a polynomial.
1038 Spectral lines are detected first using linefinder and masked out
1039 to avoid them affecting the baseline solution.
1040
1041 Parameters:
[880]1042 mask: an optional mask retrieved from scantable
 1043 edge: an optional number of channels to drop at
1044 the edge of spectrum. If only one value is
1045 specified, the same number will be dropped from
1046 both sides of the spectrum. Default is to keep
[907]1047 all channels. Nested tuples represent individual
 1048 edge selection for different IFs (the number of spectral
1049 channels can be different)
[880]1050 order: the order of the polynomial (default is 0)
1051 threshold: the threshold used by line finder. It is better to
1052 keep it large as only strong lines affect the
1053 baseline solution.
1054 insitu: if False a new scantable is returned.
1055 Otherwise, the scaling is done in-situ
1056 The default is taken from .asaprc (False)
1057
1058 Example:
1059 scan2=scan.auto_poly_baseline(order=7)
1060 """
1061 if insitu is None: insitu = rcParams['insitu']
1062 varlist = vars()
1063 from asap.asapfitter import fitter
1064 from asap.asaplinefind import linefinder
1065 from asap import _is_sequence_or_number as _is_valid
1066
[907]1067 # check whether edge is set up for each IF individually
1068 individualEdge = False;
1069 if len(edge)>1:
1070 if isinstance(edge[0],list) or isinstance(edge[0],tuple):
[919]1071 individualEdge = True;
[907]1072
1073 if not _is_valid(edge, int) and not individualEdge:
[909]1074 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1075 pair of integers specified as a tuple. Nested tuples are allowed \
1076 to make individual selection for different IFs."
[919]1077
[907]1078 curedge = (0,0)
1079 if individualEdge:
1080 for edge_par in edge:
 1081 if not _is_valid(edge_par,int):
[909]1082 raise ValueError, "Each element of the 'edge' tuple has \
[907]1083 to be a pair of integers or an integer."
1084 else:
1085 curedge = edge;
[880]1086
1087 # setup fitter
1088 f = fitter()
1089 f.set_function(poly=order)
1090
1091 # setup line finder
1092 fl=linefinder()
1093 fl.set_options(threshold=threshold)
1094
1095 if not insitu:
1096 workscan=self.copy()
1097 else:
1098 workscan=self
1099
[907]1100 fl.set_scan(workscan)
1101
[880]1102 rows=range(workscan.nrow())
1103 from asap import asaplog
1104 asaplog.push("Processing:")
1105 for r in rows:
1106 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % (workscan.getscan(r),workscan.getbeam(r),workscan.getif(r),workscan.getpol(r), workscan.getcycle(r))
1107 asaplog.push(msg, False)
[907]1108
1109 # figure out edge parameter
1110 if individualEdge:
 1111 if len(edge)<=workscan.getif(r):
 1112 raise RuntimeError, "Number of edge elements appears to be less than the number of IFs"
1113 curedge = edge[workscan.getif(r)]
[919]1114
[907]1115 # setup line finder
1116 fl.find_lines(r,mask,curedge)
[880]1117 f.set_scan(workscan, fl.get_mask())
1118 f.x = workscan._getabcissa(r)
1119 f.y = workscan._getspectrum(r)
1120 f.data = None
1121 f.fit()
1122 x = f.get_parameters()
1123 workscan._setspectrum(f.fitter.getresidual(), r)
1124 workscan._add_history("poly_baseline", varlist)
1125 if insitu:
1126 self._assign(workscan)
1127 else:
1128 return workscan
1129
[914]1130 def rotate_linpolphase(self, angle):
1131 """
1132 Rotate the phase of the complex polarization O=Q+iU correlation.
1133 This is always done in situ in the raw data. So if you call this
1134 function more than once then each call rotates the phase further.
1135 Parameters:
1136 angle: The angle (degrees) to rotate (add) by.
1137 Examples:
1138 scan.rotate_linpolphase(2.3)
1139 """
1140 varlist = vars()
[936]1141 self._math._rotate_linpolphase(self, angle)
[914]1142 self._add_history("rotate_linpolphase", varlist)
1143 print_log()
1144 return
[710]1145
[513]1146
[914]1147 def rotate_xyphase(self, angle):
1148 """
1149 Rotate the phase of the XY correlation. This is always done in situ
1150 in the data. So if you call this function more than once
1151 then each call rotates the phase further.
1152 Parameters:
1153 angle: The angle (degrees) to rotate (add) by.
1154 Examples:
1155 scan.rotate_xyphase(2.3)
1156 """
1157 varlist = vars()
[936]1158 self._math._rotate_xyphase(self, angle)
[914]1159 self._add_history("rotate_xyphase", varlist)
1160 print_log()
1161 return
1162
1163 def swap_linears(self):
1164 """
1165 Swap the linear polarisations XX and YY
1166 """
1167 varlist = vars()
[936]1168 self._math._swap_linears(self)
[914]1169 self._add_history("swap_linears", varlist)
1170 print_log()
1171 return
1172
1173 def invert_phase(self):
1174 """
1175 Invert the phase of the complex polarisation
1176 """
1177 varlist = vars()
[936]1178 self._math._invert_phase(self)
[914]1179 self._add_history("invert_phase", varlist)
1180 print_log()
1181 return
1182
[876]1183 def add(self, offset, insitu=None):
[513]1184 """
1185 Return a scan where all spectra have the offset added
1186 Parameters:
1187 offset: the offset
1188 insitu: if False a new scantable is returned.
1189 Otherwise, the scaling is done in-situ
1190 The default is taken from .asaprc (False)
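             Example (the offset value is illustrative only):
                 scan.add(0.5, insitu=True)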
1191 """
1192 if insitu is None: insitu = rcParams['insitu']
[876]1193 self._math._setinsitu(insitu)
[513]1194 varlist = vars()
[876]1195 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1196 s._add_history("add",varlist)
1197 print_log()
1198 if insitu:
1199 self._assign(s)
1200 else:
[513]1201 return s
1202
[876]1203 def scale(self, factor, tsys=True, insitu=None,):
[513]1204 """
 1205 Return a scan where all spectra are scaled by the given 'factor'
1206 Parameters:
1207 factor: the scaling factor
1208 insitu: if False a new scantable is returned.
1209 Otherwise, the scaling is done in-situ
1210 The default is taken from .asaprc (False)
1211 tsys: if True (default) then apply the operation to Tsys
1212 as well as the data
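             Example (the scale factor is illustrative only):
                 scan.scale(1.3, tsys=True, insitu=True)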
1213 """
1214 if insitu is None: insitu = rcParams['insitu']
[876]1215 self._math._setinsitu(insitu)
[513]1216 varlist = vars()
[876]1217 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1218 s._add_history("scale",varlist)
1219 print_log()
1220 if insitu:
1221 self._assign(s)
1222 else:
[513]1223 return s
1224
[876]1225 def auto_quotient(self, mode='time', preserve=True):
[670]1226 """
 1227 This function allows quotients to be built automatically.
 1228 It assumes the observation to have the same number of
 1229 "ons" and "offs".
1230 It will support "closest off in time" in the future
1231 Parameters:
 1232 mode: the on/off detection mode; currently only 'time'
 1233 (the default) is supported. A 'suffix' mode, which
 1234 identifies 'off' scans by the trailing '_R' (Mopra/Parkes)
 1235 or '_e'/'_w' (Tid), is not yet available.
[710]1236 preserve: you can preserve (default) the continuum or
1237 remove it. The equations used are
[670]1238 preserve: Output = Toff * (on/off) - Toff
1239 remove: Output = Tref * (on/off) - Ton
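             Example:
                 q = scan.auto_quotient(preserve=True)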
1240 """
[876]1241 modes = ["time"]
[670]1242 if not mode in modes:
[876]1243 msg = "please provide valid mode. Valid modes are %s" % (modes)
1244 raise ValueError(msg)
1245 varlist = vars()
1246 s = scantable(self._math._quotient(self, mode, preserve))
1247 s._add_history("auto_quotient",varlist)
1248 print_log()
1249 return s
[710]1250
[513]1251
[876]1252
1253
[718]1254 def freq_switch(self, insitu=None):
1255 """
1256 Apply frequency switching to the data.
1257 Parameters:
1258 insitu: if False a new scantable is returned.
 1259 Otherwise, the switching is done in-situ
1260 The default is taken from .asaprc (False)
1261 Example:
1262 none
1263 """
1264 if insitu is None: insitu = rcParams['insitu']
[876]1265 self._math._setinsitu(insitu)
[718]1266 varlist = vars()
[876]1267 s = scantable(self._math._freqswitch(self))
1268 s._add_history("freq_switch",varlist)
1269 print_log()
1270 if insitu: self._assign(s)
1271 else: return s
[718]1272
[780]1273 def recalc_azel(self):
1274 """
1275 Recalculate the azimuth and elevation for each position.
1276 Parameters:
1277 none
1278 Example:
1279 """
1280 varlist = vars()
[876]1281 self._recalcazel()
[780]1282 self._add_history("recalc_azel", varlist)
1283 print_log()
1284 return
1285
[513]1286 def __add__(self, other):
1287 varlist = vars()
1288 s = None
1289 if isinstance(other, scantable):
[876]1290 print "scantable + scantable NYI"
1291 return
[513]1292 elif isinstance(other, float):
[876]1293 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1294 else:
[718]1295 raise TypeError("Other input is not a scantable or float value")
[513]1296 s._add_history("operator +", varlist)
[718]1297 print_log()
[513]1298 return s
1299
1300 def __sub__(self, other):
1301 """
1302 implicit on all axes and on Tsys
1303 """
1304 varlist = vars()
1305 s = None
1306 if isinstance(other, scantable):
[876]1307 print "scantable - scantable NYI"
1308 return
[513]1309 elif isinstance(other, float):
[876]1310 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1311 else:
[718]1312 raise TypeError("Other input is not a scantable or float value")
[513]1313 s._add_history("operator -", varlist)
[718]1314 print_log()
[513]1315 return s
[710]1316
[513]1317 def __mul__(self, other):
1318 """
1319 implicit on all axes and on Tsys
1320 """
1321 varlist = vars()
1322 s = None
1323 if isinstance(other, scantable):
[876]1324 print "scantable * scantable NYI"
1325 return
[513]1326 elif isinstance(other, float):
[876]1327 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1328 else:
[718]1329 raise TypeError("Other input is not a scantable or float value")
[513]1330 s._add_history("operator *", varlist)
[718]1331 print_log()
[513]1332 return s
1333
[710]1334
[513]1335 def __div__(self, other):
1336 """
1337 implicit on all axes and on Tsys
1338 """
1339 varlist = vars()
1340 s = None
1341 if isinstance(other, scantable):
[876]1342 print "scantable / scantable NYI"
1343 return
[513]1344 elif isinstance(other, float):
1345 if other == 0.0:
[718]1346 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1347 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1348 else:
[718]1349 raise TypeError("Other input is not a scantable or float value")
[513]1350 s._add_history("operator /", varlist)
[718]1351 print_log()
[513]1352 return s
1353
[530]1354 def get_fit(self, row=0):
1355 """
1356 Print or return the stored fits for a row in the scantable
1357 Parameters:
 1358 row: the row to which the fit has been applied.
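             Example:
                 fitinfo = scan.get_fit(row=0)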
1359 """
1360 if row > self.nrow():
1361 return
1362 from asap import asapfit
1363 fit = asapfit(self._getfit(row))
[718]1364 if rcParams['verbose']:
[530]1365 print fit
1366 return
1367 else:
1368 return fit.as_dict()
1369
[484]1370 def _add_history(self, funcname, parameters):
1371 # create date
1372 sep = "##"
1373 from datetime import datetime
1374 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1375 hist = dstr+sep
1376 hist += funcname+sep#cdate+sep
1377 if parameters.has_key('self'): del parameters['self']
1378 for k,v in parameters.iteritems():
1379 if type(v) is dict:
1380 for k2,v2 in v.iteritems():
1381 hist += k2
1382 hist += "="
1383 if isinstance(v2,scantable):
1384 hist += 'scantable'
1385 elif k2 == 'mask':
[513]1386 if isinstance(v2,list) or isinstance(v2,tuple):
1387 hist += str(self._zip_mask(v2))
1388 else:
1389 hist += str(v2)
[484]1390 else:
[513]1391 hist += str(v2)
[484]1392 else:
1393 hist += k
1394 hist += "="
1395 if isinstance(v,scantable):
1396 hist += 'scantable'
1397 elif k == 'mask':
[513]1398 if isinstance(v,list) or isinstance(v,tuple):
1399 hist += str(self._zip_mask(v))
1400 else:
1401 hist += str(v)
[484]1402 else:
1403 hist += str(v)
1404 hist += sep
1405 hist = hist[:-2] # remove trailing '##'
1406 self._addhistory(hist)
1407
[710]1408
[484]1409 def _zip_mask(self, mask):
1410 mask = list(mask)
1411 i = 0
1412 segments = []
1413 while mask[i:].count(1):
1414 i += mask[i:].index(1)
1415 if mask[i:].count(0):
1416 j = i + mask[i:].index(0)
1417 else:
[710]1418 j = len(mask)
[484]1419 segments.append([i,j])
[710]1420 i = j
[484]1421 return segments
[714]1422
[626]1423 def _get_ordinate_label(self):
1424 fu = "("+self.get_fluxunit()+")"
1425 import re
1426 lbl = "Intensity"
1427 if re.match(".K.",fu):
1428 lbl = "Brightness Temperature "+ fu
1429 elif re.match(".Jy.",fu):
1430 lbl = "Flux density "+ fu
1431 return lbl
[710]1432
[876]1433 def _check_ifs(self):
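        # Return True if all IFs (that contain data) have the same
        # number of channels.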
1434 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1435 nchans = filter(lambda t: t > 0, nchans)
[876]1436 return (sum(nchans)/len(nchans) == nchans[0])