source: trunk/python/scantable.py@908

Last change on this file since 908 was 907, checked in by vor010, 19 years ago

LineFinder & auto_poly_baseline: support for the
new scantable format has been added. findLines now accepts mask and edge
parameters, which can therefore differ between rows. The constructor
now needs just a scantable. Boost types have been removed from STLineFinder. auto_poly_baseline can accept a nested tuple of edges, which is interpreted as
different edge parameters for different IFs
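For example (channel counts below are illustrative only), with two IFs one can now write
    scan.auto_poly_baseline(order=2, edge=((50,50),(80,80)))
to drop 50 channels from both ends of IF 0 and 80 channels from both ends of IF 1.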

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 52.0 KB
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[714]3from asap import print_log
[189]4from numarray import ones,zeros
[102]5 import sys
import asap   # needed at runtime for asap._asap.Selector used below
6
[876]7class scantable(Scantable):
[102]8 """
9 The ASAP container for scans
10 """
[710]11
[484]12 def __init__(self, filename, average=None, unit=None):
[102]13 """
14 Create a scantable from a saved one or make a reference
15 Parameters:
[181]16 filename: the name of an asap table on disk
17 or
18 the name of a rpfits/sdfits/ms file
19 (integrations within scans are auto averaged
20 and the whole file is read)
21 or
22 [advanced] a reference to an existing
[102]23 scantable
[484]24 average: average all integrations within a scan on read.
25 The default (True) is taken from .asaprc.
26 unit: brightness unit; must be consistent with K or Jy.
[340]27 Over-rides the default selected by the reader
28 (input rpfits/sdfits/ms) or replaces the value
29 in existing scantables
[710]30 """
[489]31 if average is None or type(average) is not bool:
[710]32 average = rcParams['scantable.autoaverage']
[489]33
[484]34 varlist = vars()
[876]35 from asap._asap import stmath
36 self._math = stmath()
[710]37 from asap import asaplog
[876]38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
[340]40 if unit is not None:
[710]41 self.set_fluxunit(unit)
[181]42 else:
[411]43 import os.path
44 if not os.path.exists(filename):
[710]45 s = "File '%s' not found." % (filename)
46 if rcParams['verbose']:
47 asaplog.push(s)
48 print asaplog.pop().strip()
49 return
50 raise IOError(s)
[411]51 filename = os.path.expandvars(filename)
52 if os.path.isdir(filename):
[181]53 # crude check if asap table
[411]54 if os.path.exists(filename+'/table.info'):
[876]55 Scantable.__init__(self, filename, "memory")
[340]56 if unit is not None:
[710]57 self.set_fluxunit(unit)
[181]58 else:
[718]59 msg = "The given file '%s'is not a valid asap table." % (filename)
60 if rcParams['verbose']:
61 print msg
62 return
63 else:
64 raise IOError(msg)
[710]65 else:
[876]66 from asap._asap import stfiller
[340]67 ifSel = -1
68 beamSel = -1
[876]69 r = stfiller()
[710]70 r._open(filename,ifSel,beamSel)
71 asaplog.push('Importing data...')
[762]72 print_log()
[876]73 r._read()
[411]74 tbl = r._getdata()
[346]75 if unit is not None:
76 tbl.set_fluxunit(unit)
[489]77 if average:
[710]78 asaplog.push('Auto averaging integrations...')
[762]79 print_log()
[876]80 tbl2 = self._math._average((tbl,),(),'NONE','SCAN', False)
81 Scantable.__init__(self,tbl2)
[513]82 del tbl2
[226]83 else:
[876]84 Scantable.__init__(self,tbl)
[513]85 del r,tbl
[484]86 self._add_history("scantable", varlist)
[714]87 print_log()
[102]88
[876]89 def save(self, name=None, format=None, overwrite=False):
[116]90 """
[710]91 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
[280]92 Image FITS or MS2 format.
[116]93 Parameters:
[196]94 name: the name of the output file. For format="FITS" this
[489]95 is the directory file name into which all the files
[497]96 will be written (default is 'asap_FITS'). For format
97 "ASCII" this is the root file name (data in 'name'.txt
98 and header in 'name'_header.txt)
[116]99 format: an optional file format. Default is ASAP.
[280]100 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
[194]101 'SDFITS' (save as SDFITS file)
[710]102 'FITS' (saves each row as a FITS Image)
[200]103 'ASCII' (saves as ascii text file)
[226]104 'MS2' (saves as an aips++
105 MeasurementSet V2)
[411]106 overwrite: If the file should be overwritten if it exists.
[256]107 The default False is to return with warning
[411]108 without writing the output. USE WITH CARE.
[116]109 Example:
110 scan.save('myscan.asap')
111 scan.save('myscan.sdfits','SDFITS')
112 """
[411]113 from os import path
[226]114 if format is None: format = rcParams['scantable.save']
[256]115 suffix = '.'+format.lower()
116 if name is None or name =="":
117 name = 'scantable'+suffix
[718]118 from asap import asaplog
119 msg = "No filename given. Using default name %s..." % name
120 asaplog.push(msg)
[411]121 name = path.expandvars(name)
[256]122 if path.isfile(name) or path.isdir(name):
123 if not overwrite:
[718]124 msg = "File %s exists." % name
125 if rcParams['verbose']:
126 print msg
127 return
128 else:
129 raise IOError(msg)
[451]130 format2 = format.upper()
131 if format2 == 'ASAP':
[116]132 self._save(name)
133 else:
[876]134 print "NYI"
135# from asap._asap import sdwriter as _sw
136# w = _sw(format2)
137# w.write(self, name, stokes)
[718]138
139 print_log()
[116]140 return
141
[102]142 def copy(self):
143 """
144 Return a copy of this scantable.
145 Parameters:
[113]146 none
[102]147 Example:
148 copiedscan = scan.copy()
149 """
[876]150 sd = scantable(Scantable._copy(self))
[113]151 return sd
152
[102]153 def get_scan(self, scanid=None):
154 """
155 Return a specific scan (by scanno) or collection of scans (by
156 source name) in a new scantable.
157 Parameters:
[513]158 scanid: a (list of) scanno or a source name, unix-style
159 patterns are accepted for source name matching, e.g.
160 '*_R' gets all 'ref' scans
[102]161 Example:
[513]162 # get all scans containing the source '323p459'
163 newscan = scan.get_scan('323p459')
164 # get all 'off' scans
165 refscans = scan.get_scan('*_R')
166 # get a subset of scans by scanno (as listed in scan.summary())
167 newscan = scan.get_scan([0,2,7,10])
[102]168 """
169 if scanid is None:
[718]170 if rcParams['verbose']:
171 print "Please specify a scan no or name to retrieve from the scantable"
172 return
173 else:
174 raise RuntimeError("No scan given")
175
[102]176 try:
[876]177 bsel = self._getselection()
178 sel = asap._asap.Selector()
[102]179 if type(scanid) is str:
[876]180 sel._setname(scanid)
181 self._setselection(sel)
182 scopy = self._copy()
183 self._setselection(bsel)
184 return scantable(scopy)
[102]185 elif type(scanid) is int:
[876]186 sel._setscans([scanid])
187 self._setselection(sel)
188 scopy = self._copy()
189 self._setselection(bsel)
190 return scantable(scopy)
[381]191 elif type(scanid) is list:
[876]192 sel._setscans(scanid)
193 self._setselection(sel)
194 scopy = self._copy()
195 self._setselection(bsel)
196 return scantable(scopy)
[381]197 else:
[718]198 msg = "Illegal scanid type, use 'int' or 'list' if ints."
199 if rcParams['verbose']:
200 print msg
201 else:
202 raise TypeError(msg)
[102]203 except RuntimeError:
[718]204 if rcParams['verbose']: print "Couldn't find any match."
205 else: raise
[102]206
207 def __str__(self):
[876]208 return Scantable._summary(self,True)
[102]209
[876]210 def summary(self, filename=None, verbose=None):
[102]211 """
212 Print a summary of the contents of this scantable.
213 Parameters:
214 filename: the name of a file to write the output to
215 Default - no file output
[381]216 verbose: print extra info such as the frequency table
217 The default (False) is taken from .asaprc
[102]218 """
[876]219 info = Scantable._summary(self, verbose)
[381]220 if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]221 if filename is not None:
[256]222 if filename is "":
223 filename = 'scantable_summary.txt'
[415]224 from os.path import expandvars, isdir
[411]225 filename = expandvars(filename)
[415]226 if not isdir(filename):
[413]227 data = open(filename, 'w')
228 data.write(info)
229 data.close()
230 else:
[718]231 msg = "Illegal file name '%s'." % (filename)
232 if rcParams['verbose']:
233 print msg
234 else:
235 raise IOError(msg)
236 if rcParams['verbose']:
[794]237 try:
238 from IPython.genutils import page as pager
239 except ImportError:
240 from pydoc import pager
241 pager(info)
[718]242 else:
243 return info
[710]244
[553]245 def set_cursor(self, beam=0, IF=0, pol=0):
[102]246 """
247 Set the spectrum for individual operations.
248 Parameters:
[553]249 beam, IF, pol: a number
[102]250 Example:
[256]251 scan.set_cursor(0,0,1)
[135]252 pol1sig = scan.stats(all=False) # returns std dev for beam=0
[181]253 # if=0, pol=1
[102]254 """
[876]255 print "DEPRECATED"
[484]256 varlist = vars()
[876]257 sel = asap._asap.Selector()
258 sel._setbeams([beam])
259 sel._setpols([pol])
260 sel._setifs([IF])
261 self._add_history("set_cursor", varlist)
[102]262 return
263
[256]264 def get_cursor(self):
[113]265 """
266 Return/print the current 'cursor' into the Beam/IF/Pol cube.
267 Parameters:
268 none
269 Returns:
270 a list of values (currentBeam,currentIF,currentPol)
271 Example:
272 none
273 """
[876]274 print "DEPRECATED"
275 sel = self._getselection()
276 i = sel.getbeams()[0]
277 j = sel.getifs()[0]
278 k = sel.getpols()[0]
[721]279 from asap import asaplog
280 out = "--------------------------------------------------\n"
281 out += " Cursor position\n"
282 out += "--------------------------------------------------\n"
283 out += 'Beam=%d IF=%d Pol=%d ' % (i,j,k)
284 asaplog.push(out)
285 print_log()
286 return i,j,k
[102]287
[876]288 def stats(self, stat='stddev', mask=None):
[102]289 """
[135]290 Determine the specified statistic of the current beam/if/pol
[102]291 Takes a 'mask' as an optional parameter to specify which
292 channels should be excluded.
293 Parameters:
[135]294 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
295 'var', 'stddev', 'avdev', 'rms', 'median'
296 mask: an optional mask specifying where the statistic
[102]297 should be determined.
298 Example:
[113]299 scan.set_unit('channel')
[102]300 msk = scan.create_mask([100,200],[500,600])
[135]301 scan.stats(stat='mean', mask=msk)
[102]302 """
[256]303 from numarray import array,zeros,Float
[102]304 if mask == None:
[876]305 mask = []
[256]306 axes = ['Beam','IF','Pol','Time']
[876]307 if not self._check_ifs():
308 raise ValueError("Cannot apply mask as the IFs have different number of channels"
309 "Please use setselection() to select individual IFs")
[256]310
[876]311 statvals = self._math._stats(self, mask, stat)
312 out = ''
313 axes = []
314 for i in range(self.nrow()):
315 axis = []
316 axis.append(self.getscan(i))
317 axis.append(self.getbeam(i))
318 axis.append(self.getif(i))
319 axis.append(self.getpol(i))
320 axis.append(self.getcycle(i))
321 axes.append(axis)
322 tm = self._gettime(i)
323 src = self._getsourcename(i)
324 out += 'Scan[%d] (%s) ' % (axis[0], src)
325 out += 'Time[%s]:\n' % (tm)
326 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
327 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
328 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
329 out += '= %3.3f\n' % (statvals[i])
330 out += "--------------------------------------------------\n"
[256]331
[876]332 if rcParams['verbose']:
333 print "--------------------------------------------------"
334 print " ",stat
335 print "--------------------------------------------------"
336 print out
337 retval = { 'axesnames': ['scanno','beamno','ifno','polno','cycleno'],
338 'axes' : axes,
339 'data': statvals}
340 return retval
[102]341
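 # Sketch of how the dictionary returned by stats() can be consumed
 # (illustrative only; the 'rms' statistic and the loop are just an example):
 #
 #   res = scan.stats(stat='rms')
 #   for axis, value in zip(res['axes'], res['data']):
 #       scanno, beamno, ifno, polno, cycleno = axis
 #       print scanno, ifno, polno, value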
[876]342 def stddev(self,mask=None):
[135]343 """
344 Determine the standard deviation of the current beam/if/pol
345 Takes a 'mask' as an optional parameter to specify which
346 channels should be excluded.
347 Parameters:
348 mask: an optional mask specifying where the standard
349 deviation should be determined.
350
351 Example:
352 scan.set_unit('channel')
353 msk = scan.create_mask([100,200],[500,600])
354 scan.stddev(mask=msk)
355 """
[876]356 return self.stats(stat='stddev',mask=mask);
[135]357
[876]358 def get_tsys(self):
[113]359 """
360 Return the System temperatures.
361 Parameters:
[876]362
[113]363 Returns:
[876]364 a list of Tsys values for the current selection
[113]365 """
[256]366
[876]367 return self._row_callback(self._gettsys, "Tsys")
[256]368
[876]369 def _row_callback(self, callback, label):
370 axes = []
371 axesnames = ['scanno','beamno','ifno','polno','cycleno']
372 out = ""
373 outvec =[]
374 for i in range(self.nrow()):
375 axis = []
376 axis.append(self.getscan(i))
377 axis.append(self.getbeam(i))
378 axis.append(self.getif(i))
379 axis.append(self.getpol(i))
380 axis.append(self.getcycle(i))
381 axes.append(axis)
382 tm = self._gettime(i)
383 src = self._getsourcename(i)
384 out += 'Scan[%d] (%s) ' % (axis[0], src)
385 out += 'Time[%s]:\n' % (tm)
386 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
387 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
388 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
389 outvec.append(callback(i))
390 out += '= %3.3f\n' % (outvec[i])
391 out += "--------------------------------------------------\n"
392 if rcParams['verbose']:
393 print "--------------------------------------------------"
394 print " %s" % (label)
395 print "--------------------------------------------------"
396 print out
397 retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
398 return retval
[256]399
400
[407]401 def get_time(self, row=-1):
[113]402 """
403 Get a list of time stamps for the observations.
[181]404 Return a string for each integration in the scantable.
[113]405 Parameters:
[407]406 row: row number of integration. Default -1 returns all rows
[113]407 Example:
408 none
409 """
410 out = []
[407]411 if row == -1:
412 for i in range(self.nrow()):
413 out.append(self._gettime(i))
414 return out
415 else:
416 if row < self.nrow():
417 return self._gettime(row)
[102]418
[714]419 def get_sourcename(self, row=-1):
420 """
[794]421 Get a list of source names for the observations.
[714]422 Return a string for each integration in the scantable.
423 Parameters:
424 row: row number of integration. Default -1 returns all rows
425 Example:
426 none
427 """
428 out = []
429 if row == -1:
430 return [self._getsourcename(i) for i in range(self.nrow())]
431 else:
432 if 0 <= row < self.nrow():
433 return self._getsourcename(row)
434
[794]435 def get_elevation(self, row=-1):
436 """
437 Get a list of elevations for the observations.
438 Return a float for each integration in the scantable.
439 Parameters:
440 row: row number of integration. Default -1 returns all rows
441 Example:
442 none
443 """
444 out = []
445 if row == -1:
446 return [self._getelevation(i) for i in range(self.nrow())]
447 else:
448 if 0 <= row < self.nrow():
449 return self._getelevation(row)
450
451 def get_azimuth(self, row=-1):
452 """
453 Get a list of azimuths for the observations.
454 Return a float for each integration in the scantable.
455 Parameters:
456 row: row number of integration. Default -1 returns all rows
457 Example:
458 none
459 """
460 out = []
461 if row == -1:
462 return [self._getazimuth(i) for i in range(self.nrow())]
463 else:
464 if 0 <= row < self.nrow():
465 return self._getazimuth(row)
466
467 def get_parangle(self, row=-1):
468 """
469 Get a list of parallactic angles for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row number of integration. Default -1 returns all rows
473 Example:
474 none
475 """
476 out = []
477 if row == -1:
478 return [self._getparangle(i) for i in range(self.nrow())]
479 else:
480 if 0 <= row < self.nrow():
481 return self._getparangle(row)
482
[102]483 def set_unit(self, unit='channel'):
484 """
485 Set the unit for all following operations on this scantable
486 Parameters:
487 unit: optional unit, default is 'channel'
[113]488 one of '*Hz','km/s','channel', ''
[102]489 """
[484]490 varlist = vars()
[113]491 if unit in ['','pixel', 'channel']:
492 unit = ''
493 inf = list(self._getcoordinfo())
494 inf[0] = unit
495 self._setcoordinfo(inf)
[484]496 self._add_history("set_unit",varlist)
[113]497
[484]498 def set_instrument(self, instr):
[358]499 """
500 Set the instrument for subsequent processing
501 Parameters:
[710]502 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]503 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]504 """
505 self._setInstrument(instr)
[484]506 self._add_history("set_instument",vars())
[718]507 print_log()
[358]508
[276]509 def set_doppler(self, doppler='RADIO'):
510 """
511 Set the doppler for all following operations on this scantable.
512 Parameters:
513 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
514 """
[484]515 varlist = vars()
[276]516 inf = list(self._getcoordinfo())
517 inf[2] = doppler
518 self._setcoordinfo(inf)
[484]519 self._add_history("set_doppler",vars())
[718]520 print_log()
[710]521
[226]522 def set_freqframe(self, frame=None):
[113]523 """
524 Set the frame type of the Spectral Axis.
525 Parameters:
[591]526 frame: an optional frame type, default 'LSRK'. Valid frames are:
527 'REST','TOPO','LSRD','LSRK','BARY',
528 'GEO','GALACTO','LGROUP','CMB'
[113]529 Examples:
530 scan.set_freqframe('BARY')
531 """
[484]532 if frame is None: frame = rcParams['scantable.freqframe']
533 varlist = vars()
[113]534 valid = ['REST','TOPO','LSRD','LSRK','BARY', \
535 'GEO','GALACTO','LGROUP','CMB']
[591]536
[876]537 if 1:#frame in valid:
[113]538 inf = list(self._getcoordinfo())
539 inf[1] = frame
540 self._setcoordinfo(inf)
[484]541 self._add_history("set_freqframe",varlist)
[102]542 else:
[718]543 msg = "Please specify a valid freq type. Valid types are:\n",valid
544 if rcParams['verbose']:
545 print msg
546 else:
547 raise TypeError(msg)
548 print_log()
[710]549
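 # Typical coordinate-setup sketch (illustrative only), combining set_unit,
 # set_doppler and set_freqframe defined in this class:
 #
 #   scan.set_unit('km/s')        # velocity axis
 #   scan.set_doppler('RADIO')
 #   scan.set_freqframe('LSRK')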
[113]550 def get_unit(self):
551 """
552 Get the default unit set in this scantable
553 Parameters:
554 Returns:
555 A unit string
556 """
557 inf = self._getcoordinfo()
558 unit = inf[0]
559 if unit == '': unit = 'channel'
560 return unit
[102]561
[158]562 def get_abcissa(self, rowno=0):
[102]563 """
[158]564 Get the abcissa in the current coordinate setup for the currently
[113]565 selected Beam/IF/Pol
566 Parameters:
[226]567 rowno: an optional row number in the scantable. Default is the
568 first row, i.e. rowno=0
[113]569 Returns:
[407]570 The abcissa values and their label string (as a tuple)
[113]571 """
[256]572 abc = self._getabcissa(rowno)
[710]573 lbl = self._getabcissalabel(rowno)
[718]574 print_log()
[158]575 return abc, lbl
[113]576
577 def create_mask(self, *args, **kwargs):
578 """
[102]579 Compute and return a mask based on [min,max] windows.
[189]580 The specified windows are to be INCLUDED, when the mask is
[113]581 applied.
[102]582 Parameters:
583 [min,max],[min2,max2],...
584 Pairs of start/end points specifying the regions
585 to be masked
[189]586 invert: optional argument. If specified as True,
587 return an inverted mask, i.e. the regions
588 specified are EXCLUDED
[513]589 row: create the mask using the specified row for
590 unit conversions, default is row=0
591 only necessary if frequency varies over rows.
[102]592 Example:
[113]593 scan.set_unit('channel')
594
595 a)
[102]596 msk = scan.create_mask([400,500],[800,900])
[189]597 # masks everything outside 400 and 500
[113]598 # and 800 and 900 in the unit 'channel'
599
600 b)
601 msk = scan.create_mask([400,500],[800,900], invert=True)
[189]602 # masks the regions between 400 and 500
[113]603 # and 800 and 900 in the unit 'channel'
[710]604
[102]605 """
[513]606 row = 0
607 if kwargs.has_key("row"):
608 row = kwargs.get("row")
609 data = self._getabcissa(row)
[113]610 u = self._getcoordinfo()[0]
[718]611 if rcParams['verbose']:
[113]612 if u == "": u = "channel"
[718]613 from asap import asaplog
614 msg = "The current mask window unit is %s" % u
[876]615 if not self._check_ifs():
616 msg += "\nThis mask is only valid for IF=%d" % (self.getif(row))
[718]617 asaplog.push(msg)
[102]618 n = self.nchan()
[189]619 msk = zeros(n)
[710]620 # test if args is a 'list' or a 'normal *args - UGLY!!!
621
622 ws = (isinstance(args[-1][-1],int) or isinstance(args[-1][-1],float)) and args or args[0]
623 for window in ws:
[102]624 if (len(window) != 2 or window[0] > window[1] ):
[718]625 raise TypeError("A window needs to be defined as [min,max]")
[102]626 for i in range(n):
[113]627 if data[i] >= window[0] and data[i] < window[1]:
[189]628 msk[i] = 1
[113]629 if kwargs.has_key('invert'):
630 if kwargs.get('invert'):
631 from numarray import logical_not
[710]632 msk = logical_not(msk)
[718]633 print_log()
[102]634 return msk
[710]635
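 # Usage sketch for create_mask (channel values are illustrative only):
 #
 #   scan.set_unit('channel')
 #   # keep channels 100-200 and 500-600 when the mask is applied
 #   msk = scan.create_mask([100,200],[500,600])
 #   # the same windows but EXCLUDED instead; evaluate abcissa on row 2
 #   msk2 = scan.create_mask([100,200],[500,600], invert=True, row=2)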
[256]636 def get_restfreqs(self):
637 """
638 Get the restfrequency(s) stored in this scantable.
639 The return value(s) are always of unit 'Hz'
640 Parameters:
641 none
642 Returns:
643 a list of doubles
644 """
645 return list(self._getrestfreqs())
[102]646
[402]647
[876]648# def set_restfreqs(self, freqs=None, unit='Hz', lines=None, source=None,
649# theif=None):
650# """
651# Select the restfrequency for the specified source and IF OR
652# replace for all IFs. If the 'freqs' argument holds a scalar,
653# then that rest frequency will be applied to the selected
654# data (and added to the list of available rest frequencies).
655# In this way, you can set a rest frequency for each
656# source and IF combination. If the 'freqs' argument holds
657# a vector, then it MUST be of length the number of IFs
658# (and the available restfrequencies will be replaced by
659# this vector). In this case, *all* data ('source' and
660# 'theif' are ignored) have the restfrequency set per IF according
661# to the corresponding value you give in the 'freqs' vector.
662# E.g. 'freqs=[1e9,2e9]' would mean IF 0 gets restfreq 1e9 and
663# IF 1 gets restfreq 2e9.
664#
665# You can also specify the frequencies via known line names
666# in the argument 'lines'. Use 'freqs' or 'lines'. 'freqs'
667# takes precedence. See the list of known names via function
668# scantable.lines()
669# Parameters:
670# freqs: list of rest frequencies
671# unit: unit for rest frequency (default 'Hz')
672# lines: list of known spectral lines names (alternative to freqs).
673# See possible list via scantable.lines()
674# source: Source name (blank means all)
675# theif: IF (-1 means all)
676# Example:
677# scan.set_restfreqs(freqs=1.4e9, source='NGC253', theif=2)
678# scan.set_restfreqs(freqs=[1.4e9,1.67e9])
679# """
680# varlist = vars()
681# if source is None:
682# source = ""
683# if theif is None:
684# theif = -1
685# t = type(freqs)
686# if t is int or t is float:
687# freqs = [freqs]
688# if freqs is None:
689# freqs = []
690# t = type(lines)
691# if t is str:
692# lines = [lines]
693# if lines is None:
694# lines = []
695# self._setrestfreqs(freqs, unit, lines, source, theif)
696# self._add_history("set_restfreqs", varlist)
697#
[402]698
[391]699
[484]700 def history(self):
701 hist = list(self._gethistory())
[794]702 out = "-"*80
[484]703 for h in hist:
[489]704 if h.startswith("---"):
[794]705 out += "\n"+h
[489]706 else:
707 items = h.split("##")
708 date = items[0]
709 func = items[1]
710 items = items[2:]
[794]711 out += "\n"+date+"\n"
712 out += "Function: %s\n Parameters:" % (func)
[489]713 for i in items:
714 s = i.split("=")
[794]715 out += "\n %s = %s" % (s[0],s[1])
716 out += "\n"+"-"*80
717 try:
718 from IPython.genutils import page as pager
719 except ImportError:
720 from pydoc import pager
721 pager(out)
[484]722 return
723
[513]724 #
725 # Maths business
726 #
727
[558]728 def average_time(self, mask=None, scanav=False, weight='tint'):
[513]729 """
730 Return the (time) average of a scan.
731 Note:
732 in channels only
733 The cursor of the output scan is set to 0.
734 Parameters:
736 mask: an optional mask (only used for 'var' and 'tsys'
737 weighting)
[558]738 scanav: True averages each scan separately
739 False (default) averages all scans together,
[524]740 weight: Weighting scheme. 'none', 'var' (1/var(spec)
[535]741 weighted), 'tsys' (1/Tsys**2 weighted), 'tint'
742 (integration time weighted) or 'tintsys' (Tint/Tsys**2).
743 The default is 'tint'
[513]744 Example:
745 # time average the scantable without using a mask
[710]746 newscan = scan.average_time()
[513]747 """
748 varlist = vars()
[524]749 if weight is None: weight = 'tint'
[513]750 if mask is None: mask = ()
[876]751 if scanav:
752 scanav = "SCAN"
753 else:
754 scanav = "NONE"
755 s = scantable(self._math._average((self,), mask, weight, scanav, False))
[513]756 s._add_history("average_time",varlist)
[718]757 print_log()
[513]758 return s
[710]759
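 # Averaging sketch (illustrative only):
 #
 #   avall  = scan.average_time()                  # one average over all scans
 #   avscan = scan.average_time(scanav=True)       # one average per scan
 #   avwt   = scan.average_time(weight='tintsys')  # Tint/Tsys**2 weighting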
[876]760 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]761 """
762 Return a scan where all spectra are converted to either
763 Jansky or Kelvin depending upon the flux units of the scan table.
764 By default the function tries to look the values up internally.
765 If it can't find them (or if you want to over-ride), you must
766 specify EITHER jyperk OR eta (and D which it will try to look up
767 also if you don't set it). jyperk takes precedence if you set both.
768 Parameters:
769 jyperk: the Jy / K conversion factor
770 eta: the aperture efficiency
771 d: the geomtric diameter (metres)
772 insitu: if False a new scantable is returned.
773 Otherwise, the scaling is done in-situ
774 The default is taken from .asaprc (False)
778 """
779 if insitu is None: insitu = rcParams['insitu']
[876]780 self._math._setinsitu(insitu)
[513]781 varlist = vars()
782 if jyperk is None: jyperk = -1.0
783 if d is None: d = -1.0
784 if eta is None: eta = -1.0
[876]785 s = scantable(self._math._convertflux(self, d, eta, jyperk))
786 s._add_history("convert_flux", varlist)
787 print_log()
788 if insitu: self._assign(s)
789 else: return s
[513]790
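 # Conversion sketch; the Jy/K factor, efficiency and diameter below are
 # arbitrary illustration values, not telescope calibration numbers:
 #
 #   jyscan = scan.convert_flux(jyperk=12.5)       # explicit Jy/K factor
 #   kscan  = scan.convert_flux(eta=0.58, d=64.0)  # via efficiency + diameter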
[876]791 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]792 """
793 Return a scan after applying a gain-elevation correction.
794 The correction can be made via either a polynomial or a
795 table-based interpolation (and extrapolation if necessary).
796 You specify polynomial coefficients, an ascii table or neither.
797 If you specify neither, then a polynomial correction will be made
798 with built in coefficients known for certain telescopes (an error
799 will occur if the instrument is not known).
800 The data and Tsys are *divided* by the scaling factors.
801 Parameters:
802 poly: Polynomial coefficients (default None) to compute a
803 gain-elevation correction as a function of
804 elevation (in degrees).
805 filename: The name of an ascii file holding correction factors.
806 The first row of the ascii file must give the column
807 names and these MUST include columns
808 "ELEVATION" (degrees) and "FACTOR" (multiply data
809 by this) somewhere.
810 The second row must give the data type of the
811 column. Use 'R' for Real and 'I' for Integer.
812 An example file would be
813 (actual factors are arbitrary) :
814
815 TIME ELEVATION FACTOR
816 R R R
817 0.1 0 0.8
818 0.2 20 0.85
819 0.3 40 0.9
820 0.4 60 0.85
821 0.5 80 0.8
822 0.6 90 0.75
823 method: Interpolation method when correcting from a table.
824 Values are "nearest", "linear" (default), "cubic"
825 and "spline"
826 insitu: if False a new scantable is returned.
827 Otherwise, the scaling is done in-situ
828 The default is taken from .asaprc (False)
829 """
830
831 if insitu is None: insitu = rcParams['insitu']
[876]832 self._math._setinsitu(insitu)
[513]833 varlist = vars()
834 if poly is None:
835 poly = ()
836 from os.path import expandvars
837 filename = expandvars(filename)
[876]838 s = scantable(self._math._gainel(self, poly, filename, method))
839 s._add_history("gain_el", varlist)
840 print_log()
841 if insitu: self._assign(s)
842 else: return s
[710]843
[513]844 def freq_align(self, reftime=None, method='cubic', perif=False,
845 insitu=None):
846 """
847 Return a scan where all rows have been aligned in frequency/velocity.
848 The alignment frequency frame (e.g. LSRK) is that set by function
849 set_freqframe.
850 Parameters:
851 reftime: reference time to align at. By default, the time of
852 the first row of data is used.
853 method: Interpolation method for regridding the spectra.
854 Choose from "nearest", "linear", "cubic" (default)
855 and "spline"
856 perif: Generate aligners per freqID (no doppler tracking) or
857 per IF (scan-based doppler tracking)
858 insitu: if False a new scantable is returned.
859 Otherwise, the scaling is done in-situ
860 The default is taken from .asaprc (False)
861 """
[876]862 print "Not yet implemented"
863 return
[513]864 if insitu is None: insitu = rcParams['insitu']
[876]865 self._math._setinsitu(insitu)
[513]866 varlist = vars()
867 if reftime is None: reftime = ''
868 perfreqid = not perif
[876]869 s = scantable(self._math._freqalign(self, reftime, method, perfreqid))
870 s._add_history("freq_align", varlist)
871 print_log()
872 if insitu: self._assign(s)
873 else: return s
[513]874
[876]875 def opacity(self, tau, insitu=None):
[513]876 """
877 Apply an opacity correction. The data
878 and Tsys are multiplied by the correction factor.
879 Parameters:
880 tau: Opacity; the correction factor applied is
881 exp(tau*ZD)
882 where ZD is the zenith distance
883 insitu: if False a new scantable is returned.
884 Otherwise, the scaling is done in-situ
885 The default is taken from .asaprc (False)
886 """
887 if insitu is None: insitu = rcParams['insitu']
[876]888 self._math._setinsitu(insitu)
[513]889 varlist = vars()
[876]890 s = scantable(self._math._opacity(self, tau))
891 s._add_history("opacity", varlist)
892 print_log()
893 if insitu: self._assign(s)
894 else: return s
[513]895
896 def bin(self, width=5, insitu=None):
897 """
898 Return a scan where all spectra have been binned up.
899 width: The bin width (default=5) in pixels
900 insitu: if False a new scantable is returned.
901 Otherwise, the scaling is done in-situ
902 The default is taken from .asaprc (False)
903 """
904 if insitu is None: insitu = rcParams['insitu']
[876]905 self._math._setinsitu(insitu)
[513]906 varlist = vars()
[876]907 s = scantable(self._math._bin(self, width))
908 s._add_history("bin",varlist)
909 print_log()
910 if insitu: self._assign(s)
911 else: return s
[513]912
[710]913
[513]914 def resample(self, width=5, method='cubic', insitu=None):
915 """
916 Return a scan where all spectra have been binned up
917 width: The bin width (default=5) in pixels
918 method: Interpolation method used when resampling.
919 Values are "nearest", "linear", "cubic" (default)
920 and "spline"
921 insitu: if False a new scantable is returned.
922 Otherwise, the scaling is done in-situ
923 The default is taken from .asaprc (False)
924 """
925 if insitu is None: insitu = rcParams['insitu']
[876]926 self._math._setinsitu(insitu)
[513]927 varlist = vars()
[876]928 s = scantable(self._math._resample(self, method, width))
929 s._add_history("resample",varlist)
930 print_log()
931 if insitu: self._assign(s)
932 else: return s
[513]933
934
[876]935# def average_pol(self, mask=None, weight='none', insitu=None):
936# """
937# Average the Polarisations together.
938# The polarisation cursor of the output scan is set to 0
939# Parameters:
940# mask: An optional mask defining the region, where the
941# averaging will be applied. The output will have all
942# specified points masked.
943# weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
944# weighted), or 'tsys' (1/Tsys**2 weighted)
945# insitu: if False a new scantable is returned.
946# Otherwise, the scaling is done in-situ
947# The default is taken from .asaprc (False)
948# """
949# if insitu is None: insitu = rcParams['insitu']
950# self._math._setinsitu(insitu)
951# varlist = vars()
952#
953# if mask is None:
954# mask = ()
955# s = self._math._averagepol(self, mask, weight)
956# s._add_history("average_pol",varlist)
957# print_log()
958# if insitu: self._assign(s)
959# else: return scantable(s)
[513]960
[876]961 def smooth(self, kernel="hanning", width=5.0, insitu=None):
[513]962 """
963 Smooth the spectrum by the specified kernel (conserving flux).
964 Parameters:
965 scan: The input scan
966 kernel: The type of smoothing kernel. Select from
967 'hanning' (default), 'gaussian' and 'boxcar'.
968 The first three characters are sufficient.
969 width: The width of the kernel in pixels. For hanning this is
970 ignored, otherwise it defaults to 5 pixels.
971 For 'gaussian' it is the Full Width Half
972 Maximum. For 'boxcar' it is the full width.
973 insitu: if False a new scantable is returned.
974 Otherwise, the scaling is done in-situ
975 The default is taken from .asaprc (False)
976 Example:
977 none
978 """
979 if insitu is None: insitu = rcParams['insitu']
[876]980 self._math._setinsitu(insitu)
[513]981 varlist = vars()
[876]982 s = scantable(self._math._smooth(self,kernel,width))
983 s._add_history("smooth", varlist)
984 print_log()
985 if insitu: self._assign(s)
986 else: return s
[513]987
[876]988
989 def poly_baseline(self, mask=None, order=0, insitu=None):
[513]990 """
991 Return a scan which has been baselined (all rows) by a polynomial.
992 Parameters:
[794]993 scan: a scantable
994 mask: an optional mask
995 order: the order of the polynomial (default is 0)
996 insitu: if False a new scantable is returned.
997 Otherwise, the scaling is done in-situ
998 The default is taken from .asaprc (False)
[513]1002 Example:
1003 # return a scan baselined by a third order polynomial,
1004 # not using a mask
1005 bscan = scan.poly_baseline(order=3)
[579]1006 """
[513]1007 if insitu is None: insitu = rcParams['insitu']
1008 varlist = vars()
1009 if mask is None:
1010 from numarray import ones
[876]1011 mask = list(ones(self.nchan(-1)))
[513]1012 from asap.asapfitter import fitter
1013 f = fitter()
1014 f.set_scan(self, mask)
1015 f.set_function(poly=order)
[876]1016 s = f.auto_fit(insitu)
1017 s._add_history("poly_baseline", varlist)
[718]1018 print_log()
[876]1019 if insitu: self._assign(s)
1020 else: return s
[513]1021
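 # Baseline sketch (illustrative only): fit a 2nd order polynomial to the
 # line-free windows defined by a mask from create_mask above.
 #
 #   msk   = scan.create_mask([0,400],[600,1000])
 #   bscan = scan.poly_baseline(mask=msk, order=2)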
[880]1022 def auto_poly_baseline(self, mask=None, edge=(0,0), order=0,
1023 threshold=3, insitu=None):
1024 """
1025 Return a scan which has been baselined (all rows) by a polynomial.
1026 Spectral lines are detected first using linefinder and masked out
1027 to avoid them affecting the baseline solution.
1028
1029 Parameters:
[880]1030 mask: an optional mask retrieved from scantable
1031 edge: an optional number of channels to drop at
1032 the edge of the spectrum. If only one value is
1033 specified, the same number will be dropped from
1034 both sides of the spectrum. Default is to keep
[907]1035 all channels. Nested tuples represent individual
1036 edge selections for different IFs (whose numbers of
1037 spectral channels can differ)
[880]1038 order: the order of the polynomial (default is 0)
1039 threshold: the threshold used by line finder. It is better to
1040 keep it large as only strong lines affect the
1041 baseline solution.
1042 insitu: if False a new scantable is returned.
1043 Otherwise, the scaling is done in-situ
1044 The default is taken from .asaprc (False)
1045
1046 Example:
1047 scan2=scan.auto_poly_baseline(order=7)
1048 """
1049 if insitu is None: insitu = rcParams['insitu']
1050 varlist = vars()
1051 from asap.asapfitter import fitter
1052 from asap.asaplinefind import linefinder
1053 from asap import _is_sequence_or_number as _is_valid
1054
[907]1055 # check whether edge is set up for each IF individually
1056 individualEdge = False
1057 if len(edge) > 1:
1058 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1059 individualEdge = True
1060
1061 if not _is_valid(edge, int) and not individualEdge:
[880]1062 raise RuntimeError, "Parameter 'edge' has to be an integer or a \
[907]1063 pair of integers specified as a tuple. Nested tuples are allowed \
1064 to make individual selection for different IFs."
1065
1066 curedge = (0,0)
1067 if individualEdge:
1068 for edge_par in edge:
1069 if not _is_valid(edge_par, int):
1070 raise RuntimeError, "Each element of the 'edge' tuple has \
1071 to be a pair of integers or an integer."
1072 else:
1073 curedge = edge;
[880]1074
1075 # setup fitter
1076 f = fitter()
1077 f.set_function(poly=order)
1078
1079 # setup line finder
1080 fl=linefinder()
1081 fl.set_options(threshold=threshold)
1082
1083 if not insitu:
1084 workscan=self.copy()
1085 else:
1086 workscan=self
1087
[907]1088 fl.set_scan(workscan)
1089
[880]1090 rows=range(workscan.nrow())
1091 from asap import asaplog
1092 asaplog.push("Processing:")
1093 for r in rows:
1094 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % (workscan.getscan(r),workscan.getbeam(r),workscan.getif(r),workscan.getpol(r), workscan.getcycle(r))
1095 asaplog.push(msg, False)
[907]1096
1097 # figure out edge parameter
1098 if individualEdge:
[907]1099 if len(edge) <= workscan.getif(r):
1100 raise RuntimeError, "Number of edge elements appear to be less than the number of IFs"
1101 curedge = edge[workscan.getif(r)]
1102
1103 # setup line finder
1104 fl.find_lines(r,mask,curedge)
[880]1105 f.set_scan(workscan, fl.get_mask())
1106 f.x = workscan._getabcissa(r)
1107 f.y = workscan._getspectrum(r)
1108 f.data = None
1109 f.fit()
1110 x = f.get_parameters()
1111 workscan._setspectrum(f.fitter.getresidual(), r)
1112 workscan._add_history("poly_baseline", varlist)
1113 if insitu:
1114 self._assign(workscan)
1115 else:
1116 return workscan
1117
[876]1118# def rotate_linpolphase(self, angle):
1119# """
1120# Rotate the phase of the complex polarization O=Q+iU correlation.
1121# This is always done in situ in the raw data. So if you call this
1122# function more than once then each call rotates the phase further.
1123# Parameters:
1124# angle: The angle (degrees) to rotate (add) by.
1125# Examples:
1126# scan.rotate_linpolphase(2.3)
1127# """
1128# varlist = vars()
1129# from asap._asap import _rotate_linpolphase as _rotate
1130# _rotate(self, angle, allaxes)
1131# self._add_history("rotate_linpolphase", varlist)
1132# print_log()
1133# return
1134#
1135#
1136# def rotate_xyphase(self, angle):
1137# """
1138# Rotate the phase of the XY correlation. This is always done in situ
1139# in the data. So if you call this function more than once
1140# then each call rotates the phase further.
1141# Parameters:
1142# angle: The angle (degrees) to rotate (add) by.
1143# Examples:
1144# scan.rotate_xyphase(2.3)
1145# """
1146# varlist = vars()
1147# from asap._asap import _rotate_xyphase
1148# _rotate_xyphase(self, angle, allaxes)
1149# self._add_history("rotate_xyphase", varlist)
1150# print_log()
1151# return
[710]1152
[513]1153
[876]1154 def add(self, offset, insitu=None):
[513]1155 """
1156 Return a scan where all spectra have the offset added
1157 Parameters:
1158 offset: the offset
1159 insitu: if False a new scantable is returned.
1160 Otherwise, the scaling is done in-situ
1161 The default is taken from .asaprc (False)
1162 """
1163 if insitu is None: insitu = rcParams['insitu']
[876]1164 self._math._setinsitu(insitu)
[513]1165 varlist = vars()
[876]1166 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1167 s._add_history("add",varlist)
1168 print_log()
1169 if insitu:
1170 self._assign(s)
1171 else:
[513]1172 return s
1173
[876]1174 def scale(self, factor, tsys=True, insitu=None,):
[513]1175 """
1176 Return a scan where all spectra are scaled by the given 'factor'
1177 Parameters:
1178 factor: the scaling factor
1179 insitu: if False a new scantable is returned.
1180 Otherwise, the scaling is done in-situ
1181 The default is taken from .asaprc (False)
1182 tsys: if True (default) then apply the operation to Tsys
1183 as well as the data
1184 """
1185 if insitu is None: insitu = rcParams['insitu']
[876]1186 self._math._setinsitu(insitu)
[513]1187 varlist = vars()
[876]1188 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1189 s._add_history("scale",varlist)
1190 print_log()
1191 if insitu:
1192 self._assign(s)
1193 else:
[513]1194 return s
1195
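 # Scaling sketch (the factor is illustrative only). The '*' operator used
 # below is the __mul__ defined further down in this class:
 #
 #   scan2 = scan * 2.0                         # new scantable, Tsys untouched
 #   scan.scale(2.0, tsys=True, insitu=True)    # in place, Tsys scaled as well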
[876]1196 def auto_quotient(self, mode='time', preserve=True):
[670]1197 """
1198 This function allows one to build quotients automatically.
1199 It assumes the observation to have the same number of
1200 "ons" and "offs".
1201 It will support "closest off in time" in the future.
1202 Parameters:
1203 mode: the on/off detection mode; currently only 'time'
1204 (the default) is supported
[710]1207 preserve: you can preserve (default) the continuum or
1208 remove it. The equations used are
[670]1209 preserve: Output = Toff * (on/off) - Toff
1210 remove: Output = Toff * (on/off) - Ton
1211 """
[876]1212 modes = ["time"]
[670]1213 if not mode in modes:
[876]1214 msg = "please provide valid mode. Valid modes are %s" % (modes)
1215 raise ValueError(msg)
1216 varlist = vars()
1217 s = scantable(self._math._quotient(self, mode, preserve))
1218 s._add_history("auto_quotient",varlist)
1219 print_log()
1220 return s
[710]1221
[513]1222
[876]1223
1224
[718]1225 def freq_switch(self, insitu=None):
1226 """
1227 Apply frequency switching to the data.
1228 Parameters:
1229 insitu: if False a new scantable is returned.
1230 Otherwise, the swictching is done in-situ
1231 The default is taken from .asaprc (False)
1232 Example:
1233 none
1234 """
1235 if insitu is None: insitu = rcParams['insitu']
[876]1236 self._math._setinsitu(insitu)
[718]1237 varlist = vars()
[876]1238 s = scantable(self._math._freqswitch(self))
1239 s._add_history("freq_switch",varlist)
1240 print_log()
1241 if insitu: self._assign(s)
1242 else: return s
[718]1243
[780]1244 def recalc_azel(self):
1245 """
1246 Recalculate the azimuth and elevation for each position.
1247 Parameters:
1248 none
1249 Example:
1250 """
1251 varlist = vars()
[876]1252 self._recalcazel()
[780]1253 self._add_history("recalc_azel", varlist)
1254 print_log()
1255 return
1256
[513]1257 def __add__(self, other):
1258 varlist = vars()
1259 s = None
1260 if isinstance(other, scantable):
[876]1261 print "scantable + scantable NYI"
1262 return
[513]1263 elif isinstance(other, float):
[876]1264 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1265 else:
[718]1266 raise TypeError("Other input is not a scantable or float value")
[513]1267 s._add_history("operator +", varlist)
[718]1268 print_log()
[513]1269 return s
1270
1271 def __sub__(self, other):
1272 """
1273 implicit on all axes and on Tsys
1274 """
1275 varlist = vars()
1276 s = None
1277 if isinstance(other, scantable):
[876]1278 print "scantable - scantable NYI"
1279 return
[513]1280 elif isinstance(other, float):
[876]1281 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1282 else:
[718]1283 raise TypeError("Other input is not a scantable or float value")
[513]1284 s._add_history("operator -", varlist)
[718]1285 print_log()
[513]1286 return s
[710]1287
[513]1288 def __mul__(self, other):
1289 """
1290 implicit on all axes and on Tsys
1291 """
1292 varlist = vars()
1293 s = None
1294 if isinstance(other, scantable):
[876]1295 print "scantable * scantable NYI"
1296 return
[513]1297 elif isinstance(other, float):
[876]1298 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1299 else:
[718]1300 raise TypeError("Other input is not a scantable or float value")
[513]1301 s._add_history("operator *", varlist)
[718]1302 print_log()
[513]1303 return s
1304
[710]1305
[513]1306 def __div__(self, other):
1307 """
1308 implicit on all axes and on Tsys
1309 """
1310 varlist = vars()
1311 s = None
1312 if isinstance(other, scantable):
[876]1313 print "scantable / scantable NYI"
1314 return
[513]1315 elif isinstance(other, float):
1316 if other == 0.0:
[718]1317 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1318 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1319 else:
[718]1320 raise TypeError("Other input is not a scantable or float value")
[513]1321 s._add_history("operator /", varlist)
[718]1322 print_log()
[513]1323 return s
1324
[530]1325 def get_fit(self, row=0):
1326 """
1327 Print or return the stored fits for a row in the scantable
1328 Parameters:
1329 row: the row which the fit has been applied to.
1330 """
[530]1331 if row >= self.nrow():
1332 return
1333 from asap import asapfit
1334 fit = asapfit(self._getfit(row))
[718]1335 if rcParams['verbose']:
[530]1336 print fit
1337 return
1338 else:
1339 return fit.as_dict()
1340
[484]1341 def _add_history(self, funcname, parameters):
1342 # create date
1343 sep = "##"
1344 from datetime import datetime
1345 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1346 hist = dstr+sep
1347 hist += funcname+sep#cdate+sep
1348 if parameters.has_key('self'): del parameters['self']
1349 for k,v in parameters.iteritems():
1350 if type(v) is dict:
1351 for k2,v2 in v.iteritems():
1352 hist += k2
1353 hist += "="
1354 if isinstance(v2,scantable):
1355 hist += 'scantable'
1356 elif k2 == 'mask':
[513]1357 if isinstance(v2,list) or isinstance(v2,tuple):
1358 hist += str(self._zip_mask(v2))
1359 else:
1360 hist += str(v2)
[484]1361 else:
[513]1362 hist += str(v2)
[484]1363 else:
1364 hist += k
1365 hist += "="
1366 if isinstance(v,scantable):
1367 hist += 'scantable'
1368 elif k == 'mask':
[513]1369 if isinstance(v,list) or isinstance(v,tuple):
1370 hist += str(self._zip_mask(v))
1371 else:
1372 hist += str(v)
[484]1373 else:
1374 hist += str(v)
1375 hist += sep
1376 hist = hist[:-2] # remove trailing '##'
1377 self._addhistory(hist)
1378
[710]1379
[484]1380 def _zip_mask(self, mask):
1381 mask = list(mask)
1382 i = 0
1383 segments = []
1384 while mask[i:].count(1):
1385 i += mask[i:].index(1)
1386 if mask[i:].count(0):
1387 j = i + mask[i:].index(0)
1388 else:
[710]1389 j = len(mask)
[484]1390 segments.append([i,j])
[710]1391 i = j
[484]1392 return segments
[714]1393
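 # Worked example of the helper above (illustrative only): a channel mask of
 # 0s and 1s is compressed into half-open [start, stop) index segments, e.g.
 #
 #   _zip_mask([0, 1, 1, 0, 1])  ->  [[1, 3], [4, 5]]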
[626]1394 def _get_ordinate_label(self):
1395 fu = "("+self.get_fluxunit()+")"
1396 import re
1397 lbl = "Intensity"
1398 if re.match(".K.",fu):
1399 lbl = "Brightness Temperature "+ fu
1400 elif re.match(".Jy.",fu):
1401 lbl = "Flux density "+ fu
1402 return lbl
[710]1403
[876]1404 def _check_ifs(self):
1405 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1406 nchans = filter(lambda t: t > 0, nchans)
[876]1407 return (sum(nchans)/len(nchans) == nchans[0])