source: trunk/python/scantable.py@ 2287

Last change on this file since 2287 was 2286, checked in by Kana Sugimoto, 13 years ago

New Development: No (performance tuning)

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: a parameter "filename" is added to Scantable::summary. scantable.summary doesn't return a string anymore

Test Programs: sdlist unittest/ scantable.summary("summary.txt")

Put in Release Notes: Yes

Module(s): sdlist, asap.summary

Description:

scantable.summary is very slow for data sets that are large in row number, as often
produced by modern telescopes. It takes > 1.5 hours to list an OTF raster scan with 350,000 rows.

This was because the method accumulated the whole text (~700,000 lines) and
returned it as a single string. Once the summary string exceeds several tens of thousands
of lines, the elapsed time increases non-linearly, possibly because the very large output
string starts to exhaust memory.

I updated scantable.summary so that it flushes the summary string more often to the file/logger.
After the modification, scantable.summary could list the data mentioned above in ~7 minutes.
The side effect is that scantable.summary no longer returns the summary string.
(But people would probably not be happy with a sub-million-line string anyway.)


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.1 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[2004]12from asap._asap import filler, msfiller
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
def preserve_selection(func):
    """Decorator: run 'func', then restore the scantable's original selection.

    The wrapped method may freely modify the selection; whatever selection
    was active before the call is reinstated afterwards, even on error.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    Returns True when 'filename' is a directory containing the casacore
    table files 'table.info' and 'table.dat' and the table type recorded
    in 'table.info' is not a Measurement Set.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # The first line of table.info names the table type.  As in the
    # original heuristic, anything that is not a Measurement Set counts
    # as a scantable (no explicit 'Scantable' check).
    # Fix: guarantee the file handle is closed even if readline() raises.
    f = open(filename + '/table.info')
    try:
        first = f.readline()
    finally:
        f.close()
    return first.find('Measurement Set') == -1
[1697]57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    Returns True when 'filename' is a directory containing the casacore
    table files 'table.info' and 'table.dat' and the table type recorded
    in 'table.info' is a Measurement Set.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # Fix: guarantee the file handle is closed even if readline() raises.
    f = open(filename + '/table.info')
    try:
        first = f.readline()
    finally:
        f.close()
    return first.find('Measurement Set') != -1
[2186]78
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation (raise syntax modernised: callable form works on
    # --- both Python 2 and 3; messages no longer embed stray whitespace
    # --- from in-string line continuations)
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise the trivial shapes first
    if isinstance(edge, int):
        edge = [edge, edge]                  # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1 and isinstance(edge[0], int):
            edge = [edge[0], edge[0]]        # e.g. [1] => [1,1]

    # 'common' edge: a flat pair (or single value) applied to every IF
    commonedge = len(edge) <= 2
    if commonedge:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        norm_edge = edge if len(edge) > 1 else edge + edge
    else:
        # per-IF form: expand single integers to [n, n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
163
def raise_fitting_failure_exception(e):
    """Report a failed fit: log it in verbose mode, else raise RuntimeError."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
171
def pack_progress_params(showprogress, minnrow):
    """Encode progress-bar options as 'true|false,<minnrow>' for the C++ layer."""
    return "%s,%s" % (str(showprogress).lower(), str(minnrow))
174
[876]175class scantable(Scantable):
[1846]176 """\
177 The ASAP container for scans (single-dish data).
[102]178 """
[1819]179
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        (**args) for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      (**args) for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallatified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the locals at this point; recorded in the history below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference/copy construction from an existing low-level table
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native ASAP table: open directly (on disk or in memory)
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific keyword
                    # arguments ('getpt', 'antenna') to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file: let the filler auto-detect
                    # the format (rpfits/sdfits/...)
                    self._fill([filename], unit, average)
                else:
                    # NOTE(review): message is missing a space after '%s';
                    # kept byte-identical since it is a runtime string.
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]284
[1862]285 @asaplog_post_dec
[876]286 def save(self, name=None, format=None, overwrite=False):
[1846]287 """\
[1280]288 Store the scantable on disk. This can be an asap (aips++) Table,
289 SDFITS or MS2 format.
[1846]290
[116]291 Parameters:
[1846]292
[1093]293 name: the name of the outputfile. For format "ASCII"
294 this is the root file name (data in 'name'.txt
[497]295 and header in 'name'_header.txt)
[1855]296
[116]297 format: an optional file format. Default is ASAP.
[1855]298 Allowed are:
299
300 * 'ASAP' (save as ASAP [aips++] Table),
301 * 'SDFITS' (save as SDFITS file)
302 * 'ASCII' (saves as ascii text file)
303 * 'MS2' (saves as an casacore MeasurementSet V2)
[2277]304 * 'FITS' (save as image FITS - not readable by class)
[1855]305 * 'CLASS' (save as FITS readable by CLASS)
306
[411]307 overwrite: If the file should be overwritten if it exists.
[256]308 The default False is to return with warning
[411]309 without writing the output. USE WITH CARE.
[1855]310
[1846]311 Example::
312
[116]313 scan.save('myscan.asap')
[1118]314 scan.save('myscan.sdfits', 'SDFITS')
[1846]315
[116]316 """
[411]317 from os import path
[1593]318 format = format or rcParams['scantable.save']
[256]319 suffix = '.'+format.lower()
[1118]320 if name is None or name == "":
[256]321 name = 'scantable'+suffix
[718]322 msg = "No filename given. Using default name %s..." % name
323 asaplog.push(msg)
[411]324 name = path.expandvars(name)
[256]325 if path.isfile(name) or path.isdir(name):
326 if not overwrite:
[718]327 msg = "File %s exists." % name
[1859]328 raise IOError(msg)
[451]329 format2 = format.upper()
330 if format2 == 'ASAP':
[116]331 self._save(name)
[2029]332 elif format2 == 'MS2':
333 msopt = {'ms': {'overwrite': overwrite } }
334 from asap._asap import mswriter
335 writer = mswriter( self )
336 writer.write( name, msopt )
[116]337 else:
[989]338 from asap._asap import stwriter as stw
[1118]339 writer = stw(format2)
340 writer.write(self, name)
[116]341 return
342
[102]343 def copy(self):
[1846]344 """Return a copy of this scantable.
345
346 *Note*:
347
[1348]348 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]349
350 Example::
351
[102]352 copiedscan = scan.copy()
[1846]353
[102]354 """
[876]355 sd = scantable(Scantable._copy(self))
[113]356 return sd
357
[1093]358 def drop_scan(self, scanid=None):
[1846]359 """\
[1093]360 Return a new scantable where the specified scan number(s) has(have)
361 been dropped.
[1846]362
[1093]363 Parameters:
[1846]364
[1093]365 scanid: a (list of) scan number(s)
[1846]366
[1093]367 """
368 from asap import _is_sequence_or_number as _is_valid
369 from asap import _to_list
370 from asap import unique
371 if not _is_valid(scanid):
[2277]372 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
[1859]373 scanid = _to_list(scanid)
374 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
375 for sid in scanid: allscans.remove(sid)
376 if len(allscans) == 0:
377 raise ValueError("Can't remove all scans")
378 sel = selector(scans=allscans)
379 return self._select_copy(sel)
[1093]380
[1594]381 def _select_copy(self, selection):
382 orig = self.get_selection()
383 self.set_selection(orig+selection)
384 cp = self.copy()
385 self.set_selection(orig)
386 return cp
387
[102]388 def get_scan(self, scanid=None):
[1855]389 """\
[102]390 Return a specific scan (by scanno) or collection of scans (by
391 source name) in a new scantable.
[1846]392
393 *Note*:
394
[1348]395 See scantable.drop_scan() for the inverse operation.
[1846]396
[102]397 Parameters:
[1846]398
[513]399 scanid: a (list of) scanno or a source name, unix-style
400 patterns are accepted for source name matching, e.g.
401 '*_R' gets all 'ref scans
[1846]402
403 Example::
404
[513]405 # get all scans containing the source '323p459'
406 newscan = scan.get_scan('323p459')
407 # get all 'off' scans
408 refscans = scan.get_scan('*_R')
409 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]410 newscan = scan.get_scan([0, 2, 7, 10])
[1846]411
[102]412 """
413 if scanid is None:
[1859]414 raise RuntimeError( 'Please specify a scan no or name to '
415 'retrieve from the scantable' )
[102]416 try:
[946]417 bsel = self.get_selection()
418 sel = selector()
[102]419 if type(scanid) is str:
[946]420 sel.set_name(scanid)
[1594]421 return self._select_copy(sel)
[102]422 elif type(scanid) is int:
[946]423 sel.set_scans([scanid])
[1594]424 return self._select_copy(sel)
[381]425 elif type(scanid) is list:
[946]426 sel.set_scans(scanid)
[1594]427 return self._select_copy(sel)
[381]428 else:
[718]429 msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]430 raise TypeError(msg)
[102]431 except RuntimeError:
[1859]432 raise
[102]433
434 def __str__(self):
[2178]435 return Scantable._summary(self)
[102]436
[976]437 def summary(self, filename=None):
[1846]438 """\
[102]439 Print a summary of the contents of this scantable.
[1846]440
[102]441 Parameters:
[1846]442
[1931]443 filename: the name of a file to write the putput to
[102]444 Default - no file output
[1846]445
[102]446 """
[2286]447# info = Scantable._summary(self)
[102]448 if filename is not None:
[256]449 if filename is "":
450 filename = 'scantable_summary.txt'
[415]451 from os.path import expandvars, isdir
[411]452 filename = expandvars(filename)
[2286]453# if not isdir(filename):
454# data = open(filename, 'w')
455# data.write(info)
456# data.close()
457# else:
458 if isdir(filename):
[718]459 msg = "Illegal file name '%s'." % (filename)
[1859]460 raise IOError(msg)
[2286]461 else:
462 filename = ""
463 Scantable._summary(self, filename)
464# return page(info)
[710]465
[1512]466 def get_spectrum(self, rowno):
[1471]467 """Return the spectrum for the current row in the scantable as a list.
[1846]468
[1471]469 Parameters:
[1846]470
[1573]471 rowno: the row number to retrieve the spectrum from
[1846]472
[1471]473 """
474 return self._getspectrum(rowno)
[946]475
[1471]476 def get_mask(self, rowno):
477 """Return the mask for the current row in the scantable as a list.
[1846]478
[1471]479 Parameters:
[1846]480
[1573]481 rowno: the row number to retrieve the mask from
[1846]482
[1471]483 """
484 return self._getmask(rowno)
485
486 def set_spectrum(self, spec, rowno):
[1938]487 """Set the spectrum for the current row in the scantable.
[1846]488
[1471]489 Parameters:
[1846]490
[1855]491 spec: the new spectrum
[1846]492
[1855]493 rowno: the row number to set the spectrum for
494
[1471]495 """
496 assert(len(spec) == self.nchan())
497 return self._setspectrum(spec, rowno)
498
[1600]499 def get_coordinate(self, rowno):
500 """Return the (spectral) coordinate for a a given 'rowno'.
[1846]501
502 *Note*:
503
[1600]504 * This coordinate is only valid until a scantable method modifies
505 the frequency axis.
506 * This coordinate does contain the original frequency set-up
507 NOT the new frame. The conversions however are done using the user
508 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
509 use scantable.freq_align first. Without it there is no closure,
[1846]510 i.e.::
[1600]511
[1846]512 c = myscan.get_coordinate(0)
513 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
514
[1600]515 Parameters:
[1846]516
[1600]517 rowno: the row number for the spectral coordinate
518
519 """
520 return coordinate(Scantable.get_coordinate(self, rowno))
521
[946]522 def get_selection(self):
[1846]523 """\
[1005]524 Get the selection object currently set on this scantable.
[1846]525
526 Example::
527
[1005]528 sel = scan.get_selection()
529 sel.set_ifs(0) # select IF 0
530 scan.set_selection(sel) # apply modified selection
[1846]531
[946]532 """
533 return selector(self._getselection())
534
[1576]535 def set_selection(self, selection=None, **kw):
[1846]536 """\
[1005]537 Select a subset of the data. All following operations on this scantable
538 are only applied to thi selection.
[1846]539
[1005]540 Parameters:
[1697]541
[1846]542 selection: a selector object (default unset the selection), or
543 any combination of "pols", "ifs", "beams", "scans",
544 "cycles", "name", "query"
[1697]545
[1846]546 Examples::
[1697]547
[1005]548 sel = selector() # create a selection object
[1118]549 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]550 scan.set_selection(sel) # set the selection
551 scan.summary() # will only print summary of scanno 0 an 3
552 scan.set_selection() # unset the selection
[1697]553 # or the equivalent
554 scan.set_selection(scans=[0,3])
555 scan.summary() # will only print summary of scanno 0 an 3
556 scan.set_selection() # unset the selection
[1846]557
[946]558 """
[1576]559 if selection is None:
560 # reset
561 if len(kw) == 0:
562 selection = selector()
563 else:
564 # try keywords
565 for k in kw:
566 if k not in selector.fields:
567 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
568 selection = selector(**kw)
[946]569 self._setselection(selection)
570
[1819]571 def get_row(self, row=0, insitu=None):
[1846]572 """\
[1819]573 Select a row in the scantable.
574 Return a scantable with single row.
[1846]575
[1819]576 Parameters:
[1846]577
578 row: row no of integration, default is 0.
579 insitu: if False a new scantable is returned. Otherwise, the
580 scaling is done in-situ. The default is taken from .asaprc
581 (False)
582
[1819]583 """
584 if insitu is None: insitu = rcParams['insitu']
585 if not insitu:
586 workscan = self.copy()
587 else:
588 workscan = self
589 # Select a row
590 sel=selector()
[1992]591 sel.set_rows([row])
592 #sel.set_scans([workscan.getscan(row)])
593 #sel.set_cycles([workscan.getcycle(row)])
594 #sel.set_beams([workscan.getbeam(row)])
595 #sel.set_ifs([workscan.getif(row)])
596 #sel.set_polarisations([workscan.getpol(row)])
597 #sel.set_name(workscan._getsourcename(row))
[1819]598 workscan.set_selection(sel)
599 if not workscan.nrow() == 1:
600 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
601 raise RuntimeError(msg)
602 del sel
603 if insitu:
604 self._assign(workscan)
605 else:
606 return workscan
607
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/velocity)
        # of the extremum rather than the value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max variants also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        # NOTE(review): if 'row' is neither None nor an int, 'rows' is
        # never bound and the loop below raises NameError.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                # look up the value/abscissa at the extremum's channel
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect it as the return value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that have more than one element
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                # statvals is indexed by absolute row number in this branch
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # single-row call: statvals holds exactly one element
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # The report is round-tripped through a temp file before being
        # pushed to the logger.
        # NOTE(review): the /tmp path is predictable (symlink-attack prone)
        # and 'statunit' is unbound here if the loop above ran zero times
        # (empty scantable) — confirm before relying on this path.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
715
716 def chan2data(self, rowno=0, chan=0):
[1846]717 """\
[1819]718 Returns channel/frequency/velocity and spectral value
719 at an arbitrary row and channel in the scantable.
[1846]720
[1819]721 Parameters:
[1846]722
[1819]723 rowno: a row number in the scantable. Default is the
724 first row, i.e. rowno=0
[1855]725
[1819]726 chan: a channel in the scantable. Default is the first
727 channel, i.e. pos=0
[1846]728
[1819]729 """
730 if isinstance(rowno, int) and isinstance(chan, int):
731 qx = {'unit': self.get_unit(),
732 'value': self._getabcissa(rowno)[chan]}
733 qy = {'unit': self.get_fluxunit(),
734 'value': self._getspectrum(rowno)[chan]}
735 return qx, qy
736
[1118]737 def stddev(self, mask=None):
[1846]738 """\
[135]739 Determine the standard deviation of the current beam/if/pol
740 Takes a 'mask' as an optional parameter to specify which
741 channels should be excluded.
[1846]742
[135]743 Parameters:
[1846]744
[135]745 mask: an optional mask specifying where the standard
746 deviation should be determined.
747
[1846]748 Example::
749
[135]750 scan.set_unit('channel')
[1118]751 msk = scan.create_mask([100, 200], [500, 600])
[135]752 scan.stddev(mask=m)
[1846]753
[135]754 """
[1118]755 return self.stats(stat='stddev', mask=mask);
[135]756
[1003]757
[1259]758 def get_column_names(self):
[1846]759 """\
[1003]760 Return a list of column names, which can be used for selection.
761 """
[1259]762 return list(Scantable.get_column_names(self))
[1003]763
[1730]764 def get_tsys(self, row=-1):
[1846]765 """\
[113]766 Return the System temperatures.
[1846]767
768 Parameters:
769
770 row: the rowno to get the information for. (default all rows)
771
[113]772 Returns:
[1846]773
[876]774 a list of Tsys values for the current selection
[1846]775
[113]776 """
[1730]777 if row > -1:
778 return self._get_column(self._gettsys, row)
[876]779 return self._row_callback(self._gettsys, "Tsys")
[256]780
[1730]781
782 def get_weather(self, row=-1):
[1846]783 """\
784 Return the weather informations.
785
786 Parameters:
787
788 row: the rowno to get the information for. (default all rows)
789
790 Returns:
791
792 a dict or list of of dicts of values for the current selection
793
794 """
795
[1730]796 values = self._get_column(self._get_weather, row)
797 if row > -1:
798 return {'temperature': values[0],
799 'pressure': values[1], 'humidity' : values[2],
800 'windspeed' : values[3], 'windaz' : values[4]
801 }
802 else:
803 out = []
804 for r in values:
805
806 out.append({'temperature': r[0],
807 'pressure': r[1], 'humidity' : r[2],
808 'windspeed' : r[3], 'windaz' : r[4]
809 })
810 return out
811
    def _row_callback(self, callback, label):
        """Apply 'callback' to every row and log a per-row report.

        Builds a textual report (scan/time header plus the value returned
        by 'callback' for each row) and posts it to the asap logger under
        the heading 'label'.  Returns the list of callback values, one per
        row of the (selected) scantable.
        """
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that have more than one element
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]835
[1947]836 def _get_column(self, callback, row=-1, *args):
[1070]837 """
838 """
839 if row == -1:
[1947]840 return [callback(i, *args) for i in range(self.nrow())]
[1070]841 else:
[1819]842 if 0 <= row < self.nrow():
[1947]843 return callback(row, *args)
[256]844
[1070]845
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1: shorter integration
            # times need more sub-second digits to keep stamps distinct
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            # precision can be 1 millisecond at max for datetime objects
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip trailing components (.%f, then %S, then %M) from the
            # parse format to match the truncated strings returned above.
            # NOTE(review): relies on Python 2 integer division here.
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]889
[1348]890
891 def get_inttime(self, row=-1):
[1846]892 """\
[1348]893 Get a list of integration times for the observations.
894 Return a time in seconds for each integration in the scantable.
[1846]895
[1348]896 Parameters:
[1846]897
[1348]898 row: row no of integration. Default -1 return all rows.
[1846]899
[1348]900 """
[1573]901 return self._get_column(self._getinttime, row)
[1348]902
[1573]903
[714]904 def get_sourcename(self, row=-1):
[1846]905 """\
[794]906 Get a list source names for the observations.
[714]907 Return a string for each integration in the scantable.
908 Parameters:
[1846]909
[1348]910 row: row no of integration. Default -1 return all rows.
[1846]911
[714]912 """
[1070]913 return self._get_column(self._getsourcename, row)
[714]914
[794]915 def get_elevation(self, row=-1):
[1846]916 """\
[794]917 Get a list of elevations for the observations.
918 Return a float for each integration in the scantable.
[1846]919
[794]920 Parameters:
[1846]921
[1348]922 row: row no of integration. Default -1 return all rows.
[1846]923
[794]924 """
[1070]925 return self._get_column(self._getelevation, row)
[794]926
927 def get_azimuth(self, row=-1):
[1846]928 """\
[794]929 Get a list of azimuths for the observations.
930 Return a float for each integration in the scantable.
[1846]931
[794]932 Parameters:
[1348]933 row: row no of integration. Default -1 return all rows.
[1846]934
[794]935 """
[1070]936 return self._get_column(self._getazimuth, row)
[794]937
938 def get_parangle(self, row=-1):
[1846]939 """\
[794]940 Get a list of parallactic angles for the observations.
941 Return a float for each integration in the scantable.
[1846]942
[794]943 Parameters:
[1846]944
[1348]945 row: row no of integration. Default -1 return all rows.
[1846]946
[794]947 """
[1070]948 return self._get_column(self._getparangle, row)
[794]949
[1070]950 def get_direction(self, row=-1):
951 """
952 Get a list of Positions on the sky (direction) for the observations.
[1594]953 Return a string for each integration in the scantable.
[1855]954
[1070]955 Parameters:
[1855]956
[1070]957 row: row no of integration. Default -1 return all rows
[1855]958
[1070]959 """
960 return self._get_column(self._getdirection, row)
961
[1391]962 def get_directionval(self, row=-1):
[1846]963 """\
[1391]964 Get a list of Positions on the sky (direction) for the observations.
965 Return a float for each integration in the scantable.
[1846]966
[1391]967 Parameters:
[1846]968
[1391]969 row: row no of integration. Default -1 return all rows
[1846]970
[1391]971 """
972 return self._get_column(self._getdirectionvec, row)
973
[1862]974 @asaplog_post_dec
[102]975 def set_unit(self, unit='channel'):
[1846]976 """\
[102]977 Set the unit for all following operations on this scantable
[1846]978
[102]979 Parameters:
[1846]980
981 unit: optional unit, default is 'channel'. Use one of '*Hz',
982 'km/s', 'channel' or equivalent ''
983
[102]984 """
[484]985 varlist = vars()
[1118]986 if unit in ['', 'pixel', 'channel']:
[113]987 unit = ''
988 inf = list(self._getcoordinfo())
989 inf[0] = unit
990 self._setcoordinfo(inf)
[1118]991 self._add_history("set_unit", varlist)
[113]992
[1862]993 @asaplog_post_dec
[484]994 def set_instrument(self, instr):
[1846]995 """\
[1348]996 Set the instrument for subsequent processing.
[1846]997
[358]998 Parameters:
[1846]999
[710]1000 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1001 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1002
[358]1003 """
1004 self._setInstrument(instr)
[1118]1005 self._add_history("set_instument", vars())
[358]1006
[1862]1007 @asaplog_post_dec
[1190]1008 def set_feedtype(self, feedtype):
[1846]1009 """\
[1190]1010 Overwrite the feed type, which might not be set correctly.
[1846]1011
[1190]1012 Parameters:
[1846]1013
[1190]1014 feedtype: 'linear' or 'circular'
[1846]1015
[1190]1016 """
1017 self._setfeedtype(feedtype)
1018 self._add_history("set_feedtype", vars())
1019
[1862]1020 @asaplog_post_dec
[276]1021 def set_doppler(self, doppler='RADIO'):
[1846]1022 """\
[276]1023 Set the doppler for all following operations on this scantable.
[1846]1024
[276]1025 Parameters:
[1846]1026
[276]1027 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1028
[276]1029 """
[484]1030 varlist = vars()
[276]1031 inf = list(self._getcoordinfo())
1032 inf[2] = doppler
1033 self._setcoordinfo(inf)
[1118]1034 self._add_history("set_doppler", vars())
[710]1035
[1862]1036 @asaplog_post_dec
[226]1037 def set_freqframe(self, frame=None):
[1846]1038 """\
[113]1039 Set the frame type of the Spectral Axis.
[1846]1040
[113]1041 Parameters:
[1846]1042
[591]1043 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1819]1044 'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1045 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1046
1047 Example::
1048
[113]1049 scan.set_freqframe('BARY')
[1846]1050
[113]1051 """
[1593]1052 frame = frame or rcParams['scantable.freqframe']
[484]1053 varlist = vars()
[1819]1054 # "REST" is not implemented in casacore
1055 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1056 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1057 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1058 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1059
[989]1060 if frame in valid:
[113]1061 inf = list(self._getcoordinfo())
1062 inf[1] = frame
1063 self._setcoordinfo(inf)
[1118]1064 self._add_history("set_freqframe", varlist)
[102]1065 else:
[1118]1066 msg = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1067 raise TypeError(msg)
[710]1068
[1862]1069 @asaplog_post_dec
[989]1070 def set_dirframe(self, frame=""):
[1846]1071 """\
[989]1072 Set the frame type of the Direction on the sky.
[1846]1073
[989]1074 Parameters:
[1846]1075
[989]1076 frame: an optional frame type, default ''. Valid frames are:
1077 'J2000', 'B1950', 'GALACTIC'
[1846]1078
1079 Example:
1080
[989]1081 scan.set_dirframe('GALACTIC')
[1846]1082
[989]1083 """
1084 varlist = vars()
[1859]1085 Scantable.set_dirframe(self, frame)
[1118]1086 self._add_history("set_dirframe", varlist)
[989]1087
[113]1088 def get_unit(self):
[1846]1089 """\
[113]1090 Get the default unit set in this scantable
[1846]1091
[113]1092 Returns:
[1846]1093
[113]1094 A unit string
[1846]1095
[113]1096 """
1097 inf = self._getcoordinfo()
1098 unit = inf[0]
1099 if unit == '': unit = 'channel'
1100 return unit
[102]1101
[1862]1102 @asaplog_post_dec
[158]1103 def get_abcissa(self, rowno=0):
[1846]1104 """\
[158]1105 Get the abcissa in the current coordinate setup for the currently
[113]1106 selected Beam/IF/Pol
[1846]1107
[113]1108 Parameters:
[1846]1109
[226]1110 rowno: an optional row number in the scantable. Default is the
1111 first row, i.e. rowno=0
[1846]1112
[113]1113 Returns:
[1846]1114
[1348]1115 The abcissa values and the format string (as a dictionary)
[1846]1116
[113]1117 """
[256]1118 abc = self._getabcissa(rowno)
[710]1119 lbl = self._getabcissalabel(rowno)
[158]1120 return abc, lbl
[113]1121
[1862]1122 @asaplog_post_dec
[1994]1123 def flag(self, row=-1, mask=None, unflag=False):
[1846]1124 """\
[1001]1125 Flag the selected data using an optional channel mask.
[1846]1126
[1001]1127 Parameters:
[1846]1128
[1994]1129 row: an optional row number in the scantable.
1130 Default -1 flags all rows
1131
[1001]1132 mask: an optional channel mask, created with create_mask. Default
1133 (no mask) is all channels.
[1855]1134
[1819]1135 unflag: if True, unflag the data
[1846]1136
[1001]1137 """
1138 varlist = vars()
[1593]1139 mask = mask or []
[1994]1140 self._flag(row, mask, unflag)
[1001]1141 self._add_history("flag", varlist)
1142
[1862]1143 @asaplog_post_dec
[1819]1144 def flag_row(self, rows=[], unflag=False):
[1846]1145 """\
[1819]1146 Flag the selected data in row-based manner.
[1846]1147
[1819]1148 Parameters:
[1846]1149
[1843]1150 rows: list of row numbers to be flagged. Default is no row
[2277]1151 (must be explicitly specified to execute row-based flagging).
[1855]1152
[1819]1153 unflag: if True, unflag the data.
[1846]1154
[1819]1155 """
1156 varlist = vars()
[1859]1157 self._flag_row(rows, unflag)
[1819]1158 self._add_history("flag_row", varlist)
1159
[1862]1160 @asaplog_post_dec
[1819]1161 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1162 """\
[1819]1163 Flag the selected data outside a specified range (in channel-base)
[1846]1164
[1819]1165 Parameters:
[1846]1166
[1819]1167 uthres: upper threshold.
[1855]1168
[1819]1169 dthres: lower threshold
[1846]1170
[2277]1171 clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1172 False for flagging data inside the range.
[1855]1173
[1846]1174 unflag: if True, unflag the data.
1175
[1819]1176 """
1177 varlist = vars()
[1859]1178 self._clip(uthres, dthres, clipoutside, unflag)
[1819]1179 self._add_history("clip", varlist)
1180
[1862]1181 @asaplog_post_dec
[1584]1182 def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1183 """\
[1192]1184 Flag the data in 'lag' space by providing a frequency to remove.
[2177]1185 Flagged data in the scantable get interpolated over the region.
[1192]1186 No taper is applied.
[1846]1187
[1192]1188 Parameters:
[1846]1189
[1579]1190 start: the start frequency (really a period within the
1191 bandwidth) or period to remove
[1855]1192
[1579]1193 end: the end frequency or period to remove
[1855]1194
[1584]1195 unit: the frequency unit (default "MHz") or "" for
[1579]1196 explicit lag channels
[1846]1197
1198 *Notes*:
1199
[1579]1200 It is recommended to flag edges of the band or strong
[1348]1201 signals beforehand.
[1846]1202
[1192]1203 """
1204 if insitu is None: insitu = rcParams['insitu']
1205 self._math._setinsitu(insitu)
1206 varlist = vars()
[1579]1207 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1208 if not (unit == "" or base.has_key(unit)):
[1192]1209 raise ValueError("%s is not a valid unit." % unit)
[1859]1210 if unit == "":
1211 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1212 else:
1213 s = scantable(self._math._lag_flag(self, start*base[unit],
1214 end*base[unit], "frequency"))
[1192]1215 s._add_history("lag_flag", varlist)
1216 if insitu:
1217 self._assign(s)
1218 else:
1219 return s
[1001]1220
[1862]1221 @asaplog_post_dec
    def fft(self, rowno=[], mask=[], getrealimag=False):
        """\
        Apply FFT to the spectra.
        Flagged data in the scantable get interpolated over the region.

        Parameters:

            rowno:          The row number(s) to be processed. int, list
                            and tuple are accepted. By default ([]), FFT
                            is applied to the whole data.

            mask:           Auxiliary channel mask(s). Given as a boolean
                            list, it is applied to all specified rows.
                            A list of boolean lists can also be used to
                            apply different masks. In the latter case, the
                            length of 'mask' must be the same as that of
                            'rowno'. The default is [].

            getrealimag:    If True, returns the real and imaginary part
                            values of the complex results.
                            If False (the default), returns the amplitude
                            (absolute value) normalised with Ndata/2 and
                            phase (argument, in unit of radian).

        Returns:

            A list of dictionaries containing the results for each spectrum.
            Each dictionary contains two values, the real and the imaginary
            parts when getrealimag = True, or the amplitude(absolute value)
            and the phase(argument) when getrealimag = False. The key for
            these values are 'real' and 'imag', or 'ampl' and 'phase',
            respectively.
        """
        # Normalise 'rowno' to a list of row numbers.
        if isinstance(rowno, int):
            rowno = [rowno]
        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
            raise TypeError("The row number(s) must be int, list or tuple.")

        # An empty list selects every row.
        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]

        # Normalise 'mask' to a list of boolean lists.
        if not (isinstance(mask, list) or isinstance(mask, tuple)):
            raise TypeError("The mask must be a boolean list or a list of boolean list.")
        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
        if isinstance(mask[0], bool): mask = [mask]
        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
            raise TypeError("The mask must be a boolean list or a list of boolean list.")

        # A single mask is shared by all rows; otherwise one mask per row,
        # each of channel length.  NOTE(review): with a single common mask
        # its length is not validated against nchan() here.
        usecommonmask = (len(mask) == 1)
        if not usecommonmask:
            if len(mask) != len(rowno):
                raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
            for amask in mask:
                if len(amask) != self.nchan():
                    raise ValueError("The spectra and the mask have different length.")

        res = []

        imask = 0
        for whichrow in rowno:
            # _fft returns the two result components interleaved:
            # [a0, b0, a1, b1, ...] where (a, b) is (real, imag) or
            # (ampl, phase) depending on 'getrealimag'.
            fspec = self._fft(whichrow, mask[imask], getrealimag)
            nspec = len(fspec)

            i=0
            v1=[]
            v2=[]
            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}

            # de-interleave the flat result vector into the two components
            while (i < nspec):
                v1.append(fspec[i])
                v2.append(fspec[i+1])
                i+=2

            if getrealimag:
                reselem["real"] += v1
                reselem["imag"] += v2
            else:
                reselem["ampl"] += v1
                reselem["phase"] += v2

            res.append(reselem)

            # advance to the next per-row mask only when masks are per-row
            if not usecommonmask: imask += 1

        return res
1306
1307 @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        row = kwargs.get("row", 0)
        # abcissa values of 'row' translate the window boundaries from the
        # current unit into channel positions
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = self.nchan()
        # start from an all-False mask and switch the windows on
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            # a single value selects exactly that one channel
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            # accept windows given in reverse order
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]1378
[1931]1379 def get_masklist(self, mask=None, row=0, silent=False):
[1846]1380 """\
[1819]1381 Compute and return a list of mask windows, [min, max].
[1846]1382
[1819]1383 Parameters:
[1846]1384
[1819]1385 mask: channel mask, created with create_mask.
[1855]1386
[1819]1387 row: calcutate the masklist using the specified row
1388 for unit conversions, default is row=0
1389 only necessary if frequency varies over rows.
[1846]1390
[1819]1391 Returns:
[1846]1392
[1819]1393 [min, max], [min2, max2], ...
1394 Pairs of start/end points (inclusive)specifying
1395 the masked regions
[1846]1396
[1819]1397 """
1398 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1399 raise TypeError("The mask should be list or tuple.")
1400 if len(mask) < 2:
1401 raise TypeError("The mask elements should be > 1")
1402 if self.nchan() != len(mask):
1403 msg = "Number of channels in scantable != number of mask elements"
1404 raise TypeError(msg)
1405 data = self._getabcissa(row)
1406 u = self._getcoordinfo()[0]
[1859]1407 if u == "":
1408 u = "channel"
1409 msg = "The current mask window unit is %s" % u
1410 i = self._check_ifs()
1411 if not i:
1412 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1413 if not silent:
1414 asaplog.push(msg)
[1819]1415 masklist=[]
1416 ist, ien = None, None
1417 ist, ien=self.get_mask_indices(mask)
1418 if ist is not None and ien is not None:
1419 for i in xrange(len(ist)):
1420 range=[data[ist[i]],data[ien[i]]]
1421 range.sort()
1422 masklist.append([range[0],range[1]])
1423 return masklist
1424
1425 def get_mask_indices(self, mask=None):
[1846]1426 """\
[1819]1427 Compute and Return lists of mask start indices and mask end indices.
[1855]1428
1429 Parameters:
1430
[1819]1431 mask: channel mask, created with create_mask.
[1846]1432
[1819]1433 Returns:
[1846]1434
[1819]1435 List of mask start indices and that of mask end indices,
1436 i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1437
[1819]1438 """
1439 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1440 raise TypeError("The mask should be list or tuple.")
1441 if len(mask) < 2:
1442 raise TypeError("The mask elements should be > 1")
1443 istart=[]
1444 iend=[]
1445 if mask[0]: istart.append(0)
1446 for i in range(len(mask)-1):
1447 if not mask[i] and mask[i+1]:
1448 istart.append(i+1)
1449 elif mask[i] and not mask[i+1]:
1450 iend.append(i)
1451 if mask[len(mask)-1]: iend.append(len(mask)-1)
1452 if len(istart) != len(iend):
1453 raise RuntimeError("Numbers of mask start != mask end.")
1454 for i in range(len(istart)):
1455 if istart[i] > iend[i]:
1456 raise RuntimeError("Mask start index > mask end index")
1457 break
1458 return istart,iend
1459
[2013]1460 @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
            examples:
                         '' = all IFs (all channels)
                         '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): on an invalid expression this only posts an error to
        # the logger and falls through; presumably asaplog.post("ERROR")
        # aborts in casapy - confirm for standalone use.
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty string selects all valid IFs (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0],typestr='integer',
                                                offset=1,minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # NOTE(review): 'selspws' is re-initialised per spwlist, so only
            # the last range of a ';'-separated spw selection survives this
            # loop - confirm whether multi-range spw strings are expected.
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1],typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row of this IF to get its abcissa range
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
                                         -freqcoord.to_frequency(0,unit=frequnit))*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
                                         -freqcoord.to_velocity(0,unit=frequnit))*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None means 'whole spectrum of this spw'
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                    del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "+maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1586
1587 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1588 """
1589 Parameters:
1590 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1591 typestr : The type of the values in returned list
1592 ('integer' or 'float')
1593 offset : The offset value to subtract from or add to
1594 the boundary value if the selection string
1595 includes '<' or '>'
1596 minval, maxval : The minimum/maximum values to set if the
1597 selection string includes '<' or '>'.
1598 The list element is filled with None by default.
1599 Returns:
1600 A list of min/max pair of selections.
1601 Example:
1602 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1603 returns [[0,2],[5,7],[9,9]]
1604 """
1605 selgroups = selstr.split(';')
1606 sellists = []
1607 if typestr.lower().startswith('int'):
1608 formatfunc = int
1609 else:
1610 formatfunc = float
1611
1612 for currsel in selgroups:
1613 if currsel.find('~') > 0:
1614 minsel = formatfunc(currsel.split('~')[0].strip())
1615 maxsel = formatfunc(currsel.split('~')[1].strip())
1616 elif currsel.strip().startswith('<'):
1617 minsel = minval
1618 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1619 - formatfunc(offset)
1620 elif currsel.strip().startswith('>'):
1621 minsel = formatfunc(currsel.split('>')[1].strip()) \
1622 + formatfunc(offset)
1623 maxsel = maxval
1624 else:
1625 minsel = formatfunc(currsel)
1626 maxsel = formatfunc(currsel)
1627 sellists.append([minsel,maxsel])
1628 return sellists
1629
[1819]1630# def get_restfreqs(self):
1631# """
1632# Get the restfrequency(s) stored in this scantable.
1633# The return value(s) are always of unit 'Hz'
1634# Parameters:
1635# none
1636# Returns:
1637# a list of doubles
1638# """
1639# return list(self._getrestfreqs())
1640
1641 def get_restfreqs(self, ids=None):
[1846]1642 """\
[256]1643 Get the restfrequency(s) stored in this scantable.
1644 The return value(s) are always of unit 'Hz'
[1846]1645
[256]1646 Parameters:
[1846]1647
[1819]1648 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1649 be retrieved
[1846]1650
[256]1651 Returns:
[1846]1652
[1819]1653 dictionary containing ids and a list of doubles for each id
[1846]1654
[256]1655 """
[1819]1656 if ids is None:
1657 rfreqs={}
1658 idlist = self.getmolnos()
1659 for i in idlist:
1660 rfreqs[i]=list(self._getrestfreqs(i))
1661 return rfreqs
1662 else:
1663 if type(ids)==list or type(ids)==tuple:
1664 rfreqs={}
1665 for i in ids:
1666 rfreqs[i]=list(self._getrestfreqs(i))
1667 return rfreqs
1668 else:
1669 return list(self._getrestfreqs(ids))
1670 #return list(self._getrestfreqs(ids))
[102]1671
[931]1672 def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1673 """\
[931]1674 Set or replace the restfrequency specified and
[1938]1675 if the 'freqs' argument holds a scalar,
[931]1676 then that rest frequency will be applied to all the selected
1677 data. If the 'freqs' argument holds
1678 a vector, then it MUST be of equal or smaller length than
1679 the number of IFs (and the available restfrequencies will be
1680 replaced by this vector). In this case, *all* data have
1681 the restfrequency set per IF according
1682 to the corresponding value you give in the 'freqs' vector.
[1118]1683 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
[931]1684 IF 1 gets restfreq 2e9.
[1846]1685
[1395]1686 You can also specify the frequencies via a linecatalog.
[1153]1687
[931]1688 Parameters:
[1846]1689
[931]1690 freqs: list of rest frequency values or string idenitfiers
[1855]1691
[931]1692 unit: unit for rest frequency (default 'Hz')
[402]1693
[1846]1694
1695 Example::
1696
[1819]1697 # set the given restfrequency for the all currently selected IFs
[931]1698 scan.set_restfreqs(freqs=1.4e9)
[1845]1699 # set restfrequencies for the n IFs (n > 1) in the order of the
1700 # list, i.e
1701 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1702 # len(list_of_restfreqs) == nIF
1703 # for nIF == 1 the following will set multiple restfrequency for
1704 # that IF
[1819]1705 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1706 # set multiple restfrequencies per IF. as a list of lists where
1707 # the outer list has nIF elements, the inner s arbitrary
1708 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1709
[1846]1710 *Note*:
[1845]1711
[931]1712 To do more sophisticate Restfrequency setting, e.g. on a
1713 source and IF basis, use scantable.set_selection() before using
[1846]1714 this function::
[931]1715
[1846]1716 # provided your scantable is called scan
1717 selection = selector()
1718 selection.set_name("ORION*")
1719 selection.set_ifs([1])
1720 scan.set_selection(selection)
1721 scan.set_restfreqs(freqs=86.6e9)
1722
[931]1723 """
1724 varlist = vars()
[1157]1725 from asap import linecatalog
1726 # simple value
[1118]1727 if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1728 self._setrestfreqs([freqs], [""], unit)
[1157]1729 # list of values
[1118]1730 elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1731 # list values are scalars
[1118]1732 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1733 if len(freqs) == 1:
1734 self._setrestfreqs(freqs, [""], unit)
1735 else:
1736 # allow the 'old' mode of setting mulitple IFs
1737 sel = selector()
1738 savesel = self._getselection()
1739 iflist = self.getifnos()
1740 if len(freqs)>len(iflist):
1741 raise ValueError("number of elements in list of list "
1742 "exeeds the current IF selections")
1743 iflist = self.getifnos()
1744 for i, fval in enumerate(freqs):
1745 sel.set_ifs(iflist[i])
1746 self._setselection(sel)
1747 self._setrestfreqs([fval], [""], unit)
1748 self._setselection(savesel)
1749
1750 # list values are dict, {'value'=, 'name'=)
[1157]1751 elif isinstance(freqs[-1], dict):
[1845]1752 values = []
1753 names = []
1754 for d in freqs:
1755 values.append(d["value"])
1756 names.append(d["name"])
1757 self._setrestfreqs(values, names, unit)
[1819]1758 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1759 sel = selector()
1760 savesel = self._getselection()
[1322]1761 iflist = self.getifnos()
[1819]1762 if len(freqs)>len(iflist):
[1845]1763 raise ValueError("number of elements in list of list exeeds"
1764 " the current IF selections")
1765 for i, fval in enumerate(freqs):
[1322]1766 sel.set_ifs(iflist[i])
[1259]1767 self._setselection(sel)
[1845]1768 self._setrestfreqs(fval, [""], unit)
[1157]1769 self._setselection(savesel)
1770 # freqs are to be taken from a linecatalog
[1153]1771 elif isinstance(freqs, linecatalog):
1772 sel = selector()
1773 savesel = self._getselection()
1774 for i in xrange(freqs.nrow()):
[1322]1775 sel.set_ifs(iflist[i])
[1153]1776 self._setselection(sel)
[1845]1777 self._setrestfreqs([freqs.get_frequency(i)],
1778 [freqs.get_name(i)], "MHz")
[1153]1779 # ensure that we are not iterating past nIF
1780 if i == self.nif()-1: break
1781 self._setselection(savesel)
[931]1782 else:
1783 return
1784 self._add_history("set_restfreqs", varlist)
1785
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate directly to the C++ implementation in the base class
        Scantable.shift_refpix(self, delta)
[931]1801
[1862]1802 @asaplog_post_dec
[1259]1803 def history(self, filename=None):
[1846]1804 """\
[1259]1805 Print the history. Optionally to a file.
[1846]1806
[1348]1807 Parameters:
[1846]1808
[1928]1809 filename: The name of the file to save the history to.
[1846]1810
[1259]1811 """
[484]1812 hist = list(self._gethistory())
[794]1813 out = "-"*80
[484]1814 for h in hist:
[489]1815 if h.startswith("---"):
[1857]1816 out = "\n".join([out, h])
[489]1817 else:
1818 items = h.split("##")
1819 date = items[0]
1820 func = items[1]
1821 items = items[2:]
[794]1822 out += "\n"+date+"\n"
1823 out += "Function: %s\n Parameters:" % (func)
[489]1824 for i in items:
[1938]1825 if i == '':
1826 continue
[489]1827 s = i.split("=")
[1118]1828 out += "\n %s = %s" % (s[0], s[1])
[1857]1829 out = "\n".join([out, "-"*80])
[1259]1830 if filename is not None:
1831 if filename is "":
1832 filename = 'scantable_history.txt'
1833 import os
1834 filename = os.path.expandvars(os.path.expanduser(filename))
1835 if not os.path.isdir(filename):
1836 data = open(filename, 'w')
1837 data.write(out)
1838 data.close()
1839 else:
1840 msg = "Illegal file name '%s'." % (filename)
[1859]1841 raise IOError(msg)
1842 return page(out)
[513]1843 #
1844 # Maths business
1845 #
[1862]1846 @asaplog_post_dec
[931]1847 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1848 """\
[1070]1849 Return the (time) weighted average of a scan.
[1846]1850
1851 *Note*:
1852
[1070]1853 in channels only - align if necessary
[1846]1854
[513]1855 Parameters:
[1846]1856
[513]1857 mask: an optional mask (only used for 'var' and 'tsys'
1858 weighting)
[1855]1859
[558]1860 scanav: True averages each scan separately
1861 False (default) averages all scans together,
[1855]1862
[1099]1863 weight: Weighting scheme.
1864 'none' (mean no weight)
1865 'var' (1/var(spec) weighted)
1866 'tsys' (1/Tsys**2 weighted)
1867 'tint' (integration time weighted)
1868 'tintsys' (Tint/Tsys**2)
1869 'median' ( median averaging)
[535]1870 The default is 'tint'
[1855]1871
[931]1872 align: align the spectra in velocity before averaging. It takes
1873 the time of the first spectrum as reference time.
[1846]1874
1875 Example::
1876
[513]1877 # time average the scantable without using a mask
[710]1878 newscan = scan.average_time()
[1846]1879
[513]1880 """
1881 varlist = vars()
[1593]1882 weight = weight or 'TINT'
1883 mask = mask or ()
1884 scanav = (scanav and 'SCAN') or 'NONE'
[1118]1885 scan = (self, )
[1859]1886
1887 if align:
1888 scan = (self.freq_align(insitu=False), )
1889 s = None
1890 if weight.upper() == 'MEDIAN':
1891 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1892 scanav))
1893 else:
1894 s = scantable(self._math._average(scan, mask, weight.upper(),
1895 scanav))
[1099]1896 s._add_history("average_time", varlist)
[513]1897 return s
[710]1898
[1862]1899 @asaplog_post_dec
[876]1900 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1901 """\
[513]1902 Return a scan where all spectra are converted to either
1903 Jansky or Kelvin depending upon the flux units of the scan table.
1904 By default the function tries to look the values up internally.
1905 If it can't find them (or if you want to over-ride), you must
1906 specify EITHER jyperk OR eta (and D which it will try to look up
1907 also if you don't set it). jyperk takes precedence if you set both.
[1846]1908
[513]1909 Parameters:
[1846]1910
[513]1911 jyperk: the Jy / K conversion factor
[1855]1912
[513]1913 eta: the aperture efficiency
[1855]1914
[1928]1915 d: the geometric diameter (metres)
[1855]1916
[513]1917 insitu: if False a new scantable is returned.
1918 Otherwise, the scaling is done in-situ
1919 The default is taken from .asaprc (False)
[1846]1920
[513]1921 """
1922 if insitu is None: insitu = rcParams['insitu']
[876]1923 self._math._setinsitu(insitu)
[513]1924 varlist = vars()
[1593]1925 jyperk = jyperk or -1.0
1926 d = d or -1.0
1927 eta = eta or -1.0
[876]1928 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1929 s._add_history("convert_flux", varlist)
1930 if insitu: self._assign(s)
1931 else: return s
[513]1932
[1862]1933 @asaplog_post_dec
[876]1934 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1935 """\
[513]1936 Return a scan after applying a gain-elevation correction.
1937 The correction can be made via either a polynomial or a
1938 table-based interpolation (and extrapolation if necessary).
1939 You specify polynomial coefficients, an ascii table or neither.
1940 If you specify neither, then a polynomial correction will be made
1941 with built in coefficients known for certain telescopes (an error
1942 will occur if the instrument is not known).
1943 The data and Tsys are *divided* by the scaling factors.
[1846]1944
[513]1945 Parameters:
[1846]1946
[513]1947 poly: Polynomial coefficients (default None) to compute a
1948 gain-elevation correction as a function of
1949 elevation (in degrees).
[1855]1950
[513]1951 filename: The name of an ascii file holding correction factors.
1952 The first row of the ascii file must give the column
1953 names and these MUST include columns
1954 "ELEVATION" (degrees) and "FACTOR" (multiply data
1955 by this) somewhere.
1956 The second row must give the data type of the
1957 column. Use 'R' for Real and 'I' for Integer.
1958 An example file would be
1959 (actual factors are arbitrary) :
1960
1961 TIME ELEVATION FACTOR
1962 R R R
1963 0.1 0 0.8
1964 0.2 20 0.85
1965 0.3 40 0.9
1966 0.4 60 0.85
1967 0.5 80 0.8
1968 0.6 90 0.75
[1855]1969
[513]1970 method: Interpolation method when correcting from a table.
1971 Values are "nearest", "linear" (default), "cubic"
1972 and "spline"
[1855]1973
[513]1974 insitu: if False a new scantable is returned.
1975 Otherwise, the scaling is done in-situ
1976 The default is taken from .asaprc (False)
[1846]1977
[513]1978 """
1979
1980 if insitu is None: insitu = rcParams['insitu']
[876]1981 self._math._setinsitu(insitu)
[513]1982 varlist = vars()
[1593]1983 poly = poly or ()
[513]1984 from os.path import expandvars
1985 filename = expandvars(filename)
[876]1986 s = scantable(self._math._gainel(self, poly, filename, method))
1987 s._add_history("gain_el", varlist)
[1593]1988 if insitu:
1989 self._assign(s)
1990 else:
1991 return s
[710]1992
[1862]1993 @asaplog_post_dec
[931]1994 def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1995 """\
[513]1996 Return a scan where all rows have been aligned in frequency/velocity.
1997 The alignment frequency frame (e.g. LSRK) is that set by function
1998 set_freqframe.
[1846]1999
[513]2000 Parameters:
[1855]2001
[513]2002 reftime: reference time to align at. By default, the time of
2003 the first row of data is used.
[1855]2004
[513]2005 method: Interpolation method for regridding the spectra.
2006 Choose from "nearest", "linear", "cubic" (default)
2007 and "spline"
[1855]2008
[513]2009 insitu: if False a new scantable is returned.
2010 Otherwise, the scaling is done in-situ
2011 The default is taken from .asaprc (False)
[1846]2012
[513]2013 """
[931]2014 if insitu is None: insitu = rcParams["insitu"]
[876]2015 self._math._setinsitu(insitu)
[513]2016 varlist = vars()
[1593]2017 reftime = reftime or ""
[931]2018 s = scantable(self._math._freq_align(self, reftime, method))
[876]2019 s._add_history("freq_align", varlist)
2020 if insitu: self._assign(s)
2021 else: return s
[513]2022
[1862]2023 @asaplog_post_dec
[1725]2024 def opacity(self, tau=None, insitu=None):
[1846]2025 """\
[513]2026 Apply an opacity correction. The data
2027 and Tsys are multiplied by the correction factor.
[1846]2028
[513]2029 Parameters:
[1855]2030
[1689]2031 tau: (list of) opacity from which the correction factor is
[513]2032 exp(tau*ZD)
[1689]2033 where ZD is the zenith-distance.
2034 If a list is provided, it has to be of length nIF,
2035 nIF*nPol or 1 and in order of IF/POL, e.g.
2036 [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2037 if tau is `None` the opacities are determined from a
2038 model.
[1855]2039
[513]2040 insitu: if False a new scantable is returned.
2041 Otherwise, the scaling is done in-situ
2042 The default is taken from .asaprc (False)
[1846]2043
[513]2044 """
2045 if insitu is None: insitu = rcParams['insitu']
[876]2046 self._math._setinsitu(insitu)
[513]2047 varlist = vars()
[1689]2048 if not hasattr(tau, "__len__"):
2049 tau = [tau]
[876]2050 s = scantable(self._math._opacity(self, tau))
2051 s._add_history("opacity", varlist)
2052 if insitu: self._assign(s)
2053 else: return s
[513]2054
[1862]2055 @asaplog_post_dec
[513]2056 def bin(self, width=5, insitu=None):
[1846]2057 """\
[513]2058 Return a scan where all spectra have been binned up.
[1846]2059
[1348]2060 Parameters:
[1846]2061
[513]2062 width: The bin width (default=5) in pixels
[1855]2063
[513]2064 insitu: if False a new scantable is returned.
2065 Otherwise, the scaling is done in-situ
2066 The default is taken from .asaprc (False)
[1846]2067
[513]2068 """
2069 if insitu is None: insitu = rcParams['insitu']
[876]2070 self._math._setinsitu(insitu)
[513]2071 varlist = vars()
[876]2072 s = scantable(self._math._bin(self, width))
[1118]2073 s._add_history("bin", varlist)
[1589]2074 if insitu:
2075 self._assign(s)
2076 else:
2077 return s
[513]2078
[1862]2079 @asaplog_post_dec
[513]2080 def resample(self, width=5, method='cubic', insitu=None):
[1846]2081 """\
[1348]2082 Return a scan where all spectra have been binned up.
[1573]2083
[1348]2084 Parameters:
[1846]2085
[513]2086 width: The bin width (default=5) in pixels
[1855]2087
[513]2088 method: Interpolation method when correcting from a table.
2089 Values are "nearest", "linear", "cubic" (default)
2090 and "spline"
[1855]2091
[513]2092 insitu: if False a new scantable is returned.
2093 Otherwise, the scaling is done in-situ
2094 The default is taken from .asaprc (False)
[1846]2095
[513]2096 """
2097 if insitu is None: insitu = rcParams['insitu']
[876]2098 self._math._setinsitu(insitu)
[513]2099 varlist = vars()
[876]2100 s = scantable(self._math._resample(self, method, width))
[1118]2101 s._add_history("resample", varlist)
[876]2102 if insitu: self._assign(s)
2103 else: return s
[513]2104
[1862]2105 @asaplog_post_dec
[946]2106 def average_pol(self, mask=None, weight='none'):
[1846]2107 """\
[946]2108 Average the Polarisations together.
[1846]2109
[946]2110 Parameters:
[1846]2111
[946]2112 mask: An optional mask defining the region, where the
2113 averaging will be applied. The output will have all
2114 specified points masked.
[1855]2115
[946]2116 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2117 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2118
[946]2119 """
2120 varlist = vars()
[1593]2121 mask = mask or ()
[1010]2122 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2123 s._add_history("average_pol", varlist)
[992]2124 return s
[513]2125
[1862]2126 @asaplog_post_dec
[1145]2127 def average_beam(self, mask=None, weight='none'):
[1846]2128 """\
[1145]2129 Average the Beams together.
[1846]2130
[1145]2131 Parameters:
2132 mask: An optional mask defining the region, where the
2133 averaging will be applied. The output will have all
2134 specified points masked.
[1855]2135
[1145]2136 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2137 weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2138
[1145]2139 """
2140 varlist = vars()
[1593]2141 mask = mask or ()
[1145]2142 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2143 s._add_history("average_beam", varlist)
2144 return s
2145
[1586]2146 def parallactify(self, pflag):
[1846]2147 """\
[1843]2148 Set a flag to indicate whether this data should be treated as having
[1617]2149 been 'parallactified' (total phase == 0.0)
[1846]2150
[1617]2151 Parameters:
[1855]2152
[1843]2153 pflag: Bool indicating whether to turn this on (True) or
[1617]2154 off (False)
[1846]2155
[1617]2156 """
[1586]2157 varlist = vars()
2158 self._parallactify(pflag)
2159 self._add_history("parallactify", varlist)
2160
[1862]2161 @asaplog_post_dec
[992]2162 def convert_pol(self, poltype=None):
[1846]2163 """\
[992]2164 Convert the data to a different polarisation type.
[1565]2165 Note that you will need cross-polarisation terms for most conversions.
[1846]2166
[992]2167 Parameters:
[1855]2168
[992]2169 poltype: The new polarisation type. Valid types are:
[1565]2170 "linear", "circular", "stokes" and "linpol"
[1846]2171
[992]2172 """
2173 varlist = vars()
[1859]2174 s = scantable(self._math._convertpol(self, poltype))
[1118]2175 s._add_history("convert_pol", varlist)
[992]2176 return s
2177
[1862]2178 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel: The type of smoothing kernel. Select from
                    'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                    or 'poly'

            width: The width of the kernel in pixels. For hanning this is
                   ignored otherwise it defaults to 5 pixels.
                   For 'gaussian' it is the Full Width Half
                   Maximum. For 'boxcar' it is the full width.
                   For 'rmedian' and 'poly' it is the half width.

            order: Optional parameter for 'poly' kernel (default is 2), to
                   specify the order of the polynomial. Ignored by all other
                   kernels.

            plot: plot the original and the smoothed spectra.
                  In this each individual fit has to be approved, by
                  typing 'y' or 'n'

            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so each row can be compared against
        # the smoothed result and, if rejected, restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # interactively show original vs smoothed for every row and
            # ask the user to accept or reject the smoothing
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]2253
[2186]2254 @asaplog_post_dec
2255 def _parse_wn(self, wn):
2256 if isinstance(wn, list) or isinstance(wn, tuple):
2257 return wn
2258 elif isinstance(wn, int):
2259 return [ wn ]
2260 elif isinstance(wn, str):
[2277]2261 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2262 val = wn.split('-')
2263 val = [int(val[0]), int(val[1])]
2264 val.sort()
2265 res = [i for i in xrange(val[0], val[1]+1)]
[2277]2266 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2267 val = int(wn[2:])+1
2268 res = [i for i in xrange(val)]
[2277]2269 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2270 val = int(wn[:-2])+1
2271 res = [i for i in xrange(val)]
[2277]2272 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
[2186]2273 val = int(wn[1:])
2274 res = [i for i in xrange(val)]
[2277]2275 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
[2186]2276 val = int(wn[:-1])
2277 res = [i for i in xrange(val)]
[2277]2278 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
[2186]2279 val = int(wn[2:])
2280 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2281 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
[2186]2282 val = int(wn[:-2])
2283 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2284 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
[2186]2285 val = int(wn[1:])+1
2286 res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2287 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
[2186]2288 val = int(wn[:-1])+1
2289 res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2290
[2186]2291 return res
2292 else:
2293 msg = 'wrong value given for addwn/rejwn'
2294 raise RuntimeError(msg)
2295
2296
[1862]2297 @asaplog_post_dec
[2277]2298 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
[2269]2299 fftmethod=None, fftthresh=None,
2300 addwn=None, rejwn=None, clipthresh=None,
2301 clipniter=None, plot=None,
2302 getresidual=None, showprogress=None,
2303 minnrow=None, outlog=None, blfile=None):
[2047]2304 """\
[2094]2305 Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2306 Parameters:
[2186]2307 insitu: if False a new scantable is returned.
[2081]2308 Otherwise, the scaling is done in-situ
2309 The default is taken from .asaprc (False)
[2186]2310 mask: an optional mask
2311 applyfft: if True use some method, such as FFT, to find
2312 strongest sinusoidal components in the wavenumber
2313 domain to be used for baseline fitting.
2314 default is True.
2315 fftmethod: method to find the strong sinusoidal components.
2316 now only 'fft' is available and it is the default.
2317 fftthresh: the threshold to select wave numbers to be used for
2318 fitting from the distribution of amplitudes in the
2319 wavenumber domain.
2320 both float and string values accepted.
2321 given a float value, the unit is set to sigma.
2322 for string values, allowed formats include:
2323 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2324 'topx' (= the x strongest ones, e.g. 'top5').
2325 default is 3.0 (unit: sigma).
2326 addwn: the additional wave numbers to be used for fitting.
2327 list or integer value is accepted to specify every
2328 wave numbers. also string value can be used in case
2329 you need to specify wave numbers in a certain range,
2330 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2331 '<a' (= 0,1,...,a-2,a-1),
2332 '>=a' (= a, a+1, ... up to the maximum wave
2333 number corresponding to the Nyquist
2334 frequency for the case of FFT).
2335 default is [].
2336 rejwn: the wave numbers NOT to be used for fitting.
2337 can be set just as addwn but has higher priority:
2338 wave numbers which are specified both in addwn
2339 and rejwn will NOT be used. default is [].
[2081]2340 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2341 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2081]2342 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2343 plot the fit and the residual. In this each
2344 indivual fit has to be approved, by typing 'y'
2345 or 'n'
2346 getresidual: if False, returns best-fit values instead of
2347 residual. (default is True)
[2189]2348 showprogress: show progress status for large data.
2349 default is True.
2350 minnrow: minimum number of input spectra to show.
2351 default is 1000.
[2081]2352 outlog: Output the coefficients of the best-fit
2353 function to logger (default is False)
2354 blfile: Name of a text file in which the best-fit
2355 parameter values to be written
[2186]2356 (default is '': no file/logger output)
[2047]2357
2358 Example:
2359 # return a scan baselined by a combination of sinusoidal curves having
[2081]2360 # wave numbers in spectral window up to 10,
[2047]2361 # also with 3-sigma clipping, iteration up to 4 times
[2186]2362 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2363
2364 Note:
2365 The best-fit parameter values output in logger and/or blfile are now
2366 based on specunit of 'channel'.
[2047]2367 """
2368
[2186]2369 try:
2370 varlist = vars()
[2047]2371
[2186]2372 if insitu is None: insitu = rcParams['insitu']
2373 if insitu:
2374 workscan = self
2375 else:
2376 workscan = self.copy()
2377
[2277]2378 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2379 if applyfft is None: applyfft = True
2380 if fftmethod is None: fftmethod = 'fft'
2381 if fftthresh is None: fftthresh = 3.0
2382 if addwn is None: addwn = []
2383 if rejwn is None: rejwn = []
2384 if clipthresh is None: clipthresh = 3.0
2385 if clipniter is None: clipniter = 0
2386 if plot is None: plot = False
2387 if getresidual is None: getresidual = True
[2189]2388 if showprogress is None: showprogress = True
2389 if minnrow is None: minnrow = 1000
[2186]2390 if outlog is None: outlog = False
2391 if blfile is None: blfile = ''
[2047]2392
[2081]2393 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2277]2394 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2186]2395 workscan._add_history('sinusoid_baseline', varlist)
[2047]2396
2397 if insitu:
2398 self._assign(workscan)
2399 else:
2400 return workscan
2401
2402 except RuntimeError, e:
[2186]2403 raise_fitting_failure_exception(e)
[2047]2404
2405
[2186]2406 @asaplog_post_dec
[2277]2407 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2408 addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2409 chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
[2189]2410 outlog=None, blfile=None):
[2047]2411 """\
[2277]2412 Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2413 Spectral lines are detected first using linefinder and masked out
2414 to avoid them affecting the baseline solution.
2415
2416 Parameters:
[2189]2417 insitu: if False a new scantable is returned.
2418 Otherwise, the scaling is done in-situ
2419 The default is taken from .asaprc (False)
2420 mask: an optional mask retreived from scantable
2421 applyfft: if True use some method, such as FFT, to find
2422 strongest sinusoidal components in the wavenumber
2423 domain to be used for baseline fitting.
2424 default is True.
2425 fftmethod: method to find the strong sinusoidal components.
2426 now only 'fft' is available and it is the default.
2427 fftthresh: the threshold to select wave numbers to be used for
2428 fitting from the distribution of amplitudes in the
2429 wavenumber domain.
2430 both float and string values accepted.
2431 given a float value, the unit is set to sigma.
2432 for string values, allowed formats include:
[2277]2433 'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
[2189]2434 'topx' (= the x strongest ones, e.g. 'top5').
2435 default is 3.0 (unit: sigma).
2436 addwn: the additional wave numbers to be used for fitting.
2437 list or integer value is accepted to specify every
2438 wave numbers. also string value can be used in case
2439 you need to specify wave numbers in a certain range,
2440 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2441 '<a' (= 0,1,...,a-2,a-1),
2442 '>=a' (= a, a+1, ... up to the maximum wave
2443 number corresponding to the Nyquist
2444 frequency for the case of FFT).
2445 default is [].
2446 rejwn: the wave numbers NOT to be used for fitting.
2447 can be set just as addwn but has higher priority:
2448 wave numbers which are specified both in addwn
2449 and rejwn will NOT be used. default is [].
2450 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2451 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2452 edge: an optional number of channel to drop at
2453 the edge of spectrum. If only one value is
2454 specified, the same number will be dropped
2455 from both sides of the spectrum. Default
2456 is to keep all channels. Nested tuples
2457 represent individual edge selection for
2458 different IFs (a number of spectral channels
2459 can be different)
2460 threshold: the threshold used by line finder. It is
2461 better to keep it large as only strong lines
2462 affect the baseline solution.
2463 chan_avg_limit: a maximum number of consequtive spectral
2464 channels to average during the search of
2465 weak and broad lines. The default is no
2466 averaging (and no search for weak lines).
2467 If such lines can affect the fitted baseline
2468 (e.g. a high order polynomial is fitted),
2469 increase this parameter (usually values up
2470 to 8 are reasonable). Most users of this
2471 method should find the default value sufficient.
2472 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2473 plot the fit and the residual. In this each
2474 indivual fit has to be approved, by typing 'y'
2475 or 'n'
2476 getresidual: if False, returns best-fit values instead of
2477 residual. (default is True)
2478 showprogress: show progress status for large data.
2479 default is True.
2480 minnrow: minimum number of input spectra to show.
2481 default is 1000.
2482 outlog: Output the coefficients of the best-fit
2483 function to logger (default is False)
2484 blfile: Name of a text file in which the best-fit
2485 parameter values to be written
2486 (default is "": no file/logger output)
[2047]2487
2488 Example:
[2186]2489 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2490
2491 Note:
2492 The best-fit parameter values output in logger and/or blfile are now
2493 based on specunit of 'channel'.
[2047]2494 """
2495
[2186]2496 try:
2497 varlist = vars()
[2047]2498
[2186]2499 if insitu is None: insitu = rcParams['insitu']
2500 if insitu:
2501 workscan = self
[2047]2502 else:
[2186]2503 workscan = self.copy()
2504
[2277]2505 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2506 if applyfft is None: applyfft = True
2507 if fftmethod is None: fftmethod = 'fft'
2508 if fftthresh is None: fftthresh = 3.0
2509 if addwn is None: addwn = []
2510 if rejwn is None: rejwn = []
2511 if clipthresh is None: clipthresh = 3.0
2512 if clipniter is None: clipniter = 0
2513 if edge is None: edge = (0,0)
2514 if threshold is None: threshold = 3
2515 if chan_avg_limit is None: chan_avg_limit = 1
2516 if plot is None: plot = False
2517 if getresidual is None: getresidual = True
[2189]2518 if showprogress is None: showprogress = True
2519 if minnrow is None: minnrow = 1000
[2186]2520 if outlog is None: outlog = False
2521 if blfile is None: blfile = ''
[2047]2522
[2277]2523 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2524 workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2047]2525 workscan._add_history("auto_sinusoid_baseline", varlist)
2526
2527 if insitu:
2528 self._assign(workscan)
2529 else:
2530 return workscan
2531
2532 except RuntimeError, e:
[2186]2533 raise_fitting_failure_exception(e)
[2047]2534
2535 @asaplog_post_dec
[2277]2536 def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2537 plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
[1846]2538 """\
[2277]2539 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
[513]2540 Parameters:
[2189]2541 insitu: If False a new scantable is returned.
2542 Otherwise, the scaling is done in-situ
2543 The default is taken from .asaprc (False)
2544 mask: An optional mask
2545 npiece: Number of pieces. (default is 2)
2546 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2547 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2548 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2549 plot the fit and the residual. In this each
2550 indivual fit has to be approved, by typing 'y'
2551 or 'n'
2552 getresidual: if False, returns best-fit values instead of
2553 residual. (default is True)
2554 showprogress: show progress status for large data.
2555 default is True.
2556 minnrow: minimum number of input spectra to show.
2557 default is 1000.
2558 outlog: Output the coefficients of the best-fit
2559 function to logger (default is False)
2560 blfile: Name of a text file in which the best-fit
2561 parameter values to be written
2562 (default is "": no file/logger output)
[1846]2563
[2012]2564 Example:
[2277]2565 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
[2012]2566 # also with 3-sigma clipping, iteration up to 4 times
2567 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2568
2569 Note:
2570 The best-fit parameter values output in logger and/or blfile are now
2571 based on specunit of 'channel'.
[2012]2572 """
2573
[2186]2574 try:
2575 varlist = vars()
2576
2577 if insitu is None: insitu = rcParams['insitu']
2578 if insitu:
2579 workscan = self
2580 else:
2581 workscan = self.copy()
[1855]2582
[2277]2583 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2189]2584 if npiece is None: npiece = 2
2585 if clipthresh is None: clipthresh = 3.0
2586 if clipniter is None: clipniter = 0
2587 if plot is None: plot = False
2588 if getresidual is None: getresidual = True
2589 if showprogress is None: showprogress = True
2590 if minnrow is None: minnrow = 1000
2591 if outlog is None: outlog = False
2592 if blfile is None: blfile = ''
[1855]2593
[2012]2594 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2277]2595 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2012]2596 workscan._add_history("cspline_baseline", varlist)
2597
2598 if insitu:
2599 self._assign(workscan)
2600 else:
2601 return workscan
2602
2603 except RuntimeError, e:
[2186]2604 raise_fitting_failure_exception(e)
[1855]2605
[2186]2606 @asaplog_post_dec
[2277]2607 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2608 edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2609 showprogress=None, minnrow=None, outlog=None, blfile=None):
[2012]2610 """\
2611 Return a scan which has been baselined (all rows) by cubic spline
2612 function (piecewise cubic polynomial).
2613 Spectral lines are detected first using linefinder and masked out
2614 to avoid them affecting the baseline solution.
2615
2616 Parameters:
[2189]2617 insitu: if False a new scantable is returned.
2618 Otherwise, the scaling is done in-situ
2619 The default is taken from .asaprc (False)
2620 mask: an optional mask retreived from scantable
2621 npiece: Number of pieces. (default is 2)
2622 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2277]2623 clipniter: maximum number of iteration of 'clipthresh'-sigma clipping (default is 0)
[2189]2624 edge: an optional number of channel to drop at
2625 the edge of spectrum. If only one value is
2626 specified, the same number will be dropped
2627 from both sides of the spectrum. Default
2628 is to keep all channels. Nested tuples
2629 represent individual edge selection for
2630 different IFs (a number of spectral channels
2631 can be different)
2632 threshold: the threshold used by line finder. It is
2633 better to keep it large as only strong lines
2634 affect the baseline solution.
2635 chan_avg_limit: a maximum number of consequtive spectral
2636 channels to average during the search of
2637 weak and broad lines. The default is no
2638 averaging (and no search for weak lines).
2639 If such lines can affect the fitted baseline
2640 (e.g. a high order polynomial is fitted),
2641 increase this parameter (usually values up
2642 to 8 are reasonable). Most users of this
2643 method should find the default value sufficient.
2644 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2645 plot the fit and the residual. In this each
2646 indivual fit has to be approved, by typing 'y'
2647 or 'n'
2648 getresidual: if False, returns best-fit values instead of
2649 residual. (default is True)
2650 showprogress: show progress status for large data.
2651 default is True.
2652 minnrow: minimum number of input spectra to show.
2653 default is 1000.
2654 outlog: Output the coefficients of the best-fit
2655 function to logger (default is False)
2656 blfile: Name of a text file in which the best-fit
2657 parameter values to be written
2658 (default is "": no file/logger output)
[1846]2659
[1907]2660 Example:
[2012]2661 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2662
2663 Note:
2664 The best-fit parameter values output in logger and/or blfile are now
2665 based on specunit of 'channel'.
[2012]2666 """
[1846]2667
[2186]2668 try:
2669 varlist = vars()
[2012]2670
[2186]2671 if insitu is None: insitu = rcParams['insitu']
2672 if insitu:
2673 workscan = self
[1391]2674 else:
[2186]2675 workscan = self.copy()
2676
[2277]2677 if mask is None: mask = [True for i in xrange(workscan.nchan())]
[2186]2678 if npiece is None: npiece = 2
2679 if clipthresh is None: clipthresh = 3.0
2680 if clipniter is None: clipniter = 0
2681 if edge is None: edge = (0, 0)
2682 if threshold is None: threshold = 3
2683 if chan_avg_limit is None: chan_avg_limit = 1
2684 if plot is None: plot = False
2685 if getresidual is None: getresidual = True
[2189]2686 if showprogress is None: showprogress = True
2687 if minnrow is None: minnrow = 1000
[2186]2688 if outlog is None: outlog = False
2689 if blfile is None: blfile = ''
[1819]2690
[2277]2691 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]2692 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2693 clipniter,
2694 normalise_edge_param(edge),
2695 threshold,
2696 chan_avg_limit, getresidual,
2697 pack_progress_params(showprogress,
2698 minnrow),
2699 outlog, blfile)
[2012]2700 workscan._add_history("auto_cspline_baseline", varlist)
[1907]2701
[1856]2702 if insitu:
2703 self._assign(workscan)
2704 else:
2705 return workscan
[2012]2706
2707 except RuntimeError, e:
[2186]2708 raise_fitting_failure_exception(e)
[513]2709
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve every None parameter to its documented default
            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit row by row in Python so each fit
                # can be shown and accepted/rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own flag mask
                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave the spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the model, per getresidual
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter window used for the interactive fits
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[1907]2822
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # resolve every None parameter to its documented default
            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # canonicalise 'edge' to a flat per-IF (left, right) sequence
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder and fitter per row
                # so each fit can be accepted/rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # pick this row's IF-specific (left, right) edge pair
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2])  # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected fits leave the spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the model, per getresidual
                    workscan._setspectrum((f.fitter.getresidual() \
                                               if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter window used for the interactive fits
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
[2012]2971
2972 def _init_blinfo(self):
2973 """\
2974 Initialise the following three auxiliary members:
2975 blpars : parameters of the best-fit baseline,
2976 masklists : mask data (edge positions of masked channels) and
2977 actualmask : mask data (in boolean list),
2978 to keep for use later (including output to logger/text files).
2979 Used by poly_baseline() and auto_poly_baseline() in case of
2980 'plot=True'.
2981 """
2982 self.blpars = []
2983 self.masklists = []
2984 self.actualmask = []
2985 return
[880]2986
[2012]2987 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2988 """\
2989 Append baseline-fitting related info to blpars, masklist and
2990 actualmask.
2991 """
2992 self.blpars.append(data_blpars)
2993 self.masklists.append(data_masklists)
2994 self.actualmask.append(data_actualmask)
2995 return
2996
[1862]2997 @asaplog_post_dec
[914]2998 def rotate_linpolphase(self, angle):
[1846]2999 """\
[914]3000 Rotate the phase of the complex polarization O=Q+iU correlation.
3001 This is always done in situ in the raw data. So if you call this
3002 function more than once then each call rotates the phase further.
[1846]3003
[914]3004 Parameters:
[1846]3005
[914]3006 angle: The angle (degrees) to rotate (add) by.
[1846]3007
3008 Example::
3009
[914]3010 scan.rotate_linpolphase(2.3)
[1846]3011
[914]3012 """
3013 varlist = vars()
[936]3014 self._math._rotate_linpolphase(self, angle)
[914]3015 self._add_history("rotate_linpolphase", varlist)
3016 return
[710]3017
[1862]3018 @asaplog_post_dec
[914]3019 def rotate_xyphase(self, angle):
[1846]3020 """\
[914]3021 Rotate the phase of the XY correlation. This is always done in situ
3022 in the data. So if you call this function more than once
3023 then each call rotates the phase further.
[1846]3024
[914]3025 Parameters:
[1846]3026
[914]3027 angle: The angle (degrees) to rotate (add) by.
[1846]3028
3029 Example::
3030
[914]3031 scan.rotate_xyphase(2.3)
[1846]3032
[914]3033 """
3034 varlist = vars()
[936]3035 self._math._rotate_xyphase(self, angle)
[914]3036 self._add_history("rotate_xyphase", varlist)
3037 return
3038
[1862]3039 @asaplog_post_dec
[914]3040 def swap_linears(self):
[1846]3041 """\
[1573]3042 Swap the linear polarisations XX and YY, or better the first two
[1348]3043 polarisations as this also works for ciculars.
[914]3044 """
3045 varlist = vars()
[936]3046 self._math._swap_linears(self)
[914]3047 self._add_history("swap_linears", varlist)
3048 return
3049
[1862]3050 @asaplog_post_dec
[914]3051 def invert_phase(self):
[1846]3052 """\
[914]3053 Invert the phase of the complex polarisation
3054 """
3055 varlist = vars()
[936]3056 self._math._invert_phase(self)
[914]3057 self._add_history("invert_phase", varlist)
3058 return
3059
[1862]3060 @asaplog_post_dec
[876]3061 def add(self, offset, insitu=None):
[1846]3062 """\
[513]3063 Return a scan where all spectra have the offset added
[1846]3064
[513]3065 Parameters:
[1846]3066
[513]3067 offset: the offset
[1855]3068
[513]3069 insitu: if False a new scantable is returned.
3070 Otherwise, the scaling is done in-situ
3071 The default is taken from .asaprc (False)
[1846]3072
[513]3073 """
3074 if insitu is None: insitu = rcParams['insitu']
[876]3075 self._math._setinsitu(insitu)
[513]3076 varlist = vars()
[876]3077 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3078 s._add_history("add", varlist)
[876]3079 if insitu:
3080 self._assign(s)
3081 else:
[513]3082 return s
3083
[1862]3084 @asaplog_post_dec
[1308]3085 def scale(self, factor, tsys=True, insitu=None):
[1846]3086 """\
3087
[1938]3088 Return a scan where all spectra are scaled by the given 'factor'
[1846]3089
[513]3090 Parameters:
[1846]3091
[1819]3092 factor: the scaling factor (float or 1D float list)
[1855]3093
[513]3094 insitu: if False a new scantable is returned.
3095 Otherwise, the scaling is done in-situ
3096 The default is taken from .asaprc (False)
[1855]3097
[513]3098 tsys: if True (default) then apply the operation to Tsys
3099 as well as the data
[1846]3100
[513]3101 """
3102 if insitu is None: insitu = rcParams['insitu']
[876]3103 self._math._setinsitu(insitu)
[513]3104 varlist = vars()
[1819]3105 s = None
3106 import numpy
3107 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3108 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3109 from asapmath import _array2dOp
3110 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3111 else:
3112 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3113 else:
3114 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]3115 s._add_history("scale", varlist)
[876]3116 if insitu:
3117 self._assign(s)
3118 else:
[513]3119 return s
3120
[1504]3121 def set_sourcetype(self, match, matchtype="pattern",
3122 sourcetype="reference"):
[1846]3123 """\
[1502]3124 Set the type of the source to be an source or reference scan
[1846]3125 using the provided pattern.
3126
[1502]3127 Parameters:
[1846]3128
[1504]3129 match: a Unix style pattern, regular expression or selector
[1855]3130
[1504]3131 matchtype: 'pattern' (default) UNIX style pattern or
3132 'regex' regular expression
[1855]3133
[1502]3134 sourcetype: the type of the source to use (source/reference)
[1846]3135
[1502]3136 """
3137 varlist = vars()
3138 basesel = self.get_selection()
3139 stype = -1
3140 if sourcetype.lower().startswith("r"):
3141 stype = 1
3142 elif sourcetype.lower().startswith("s"):
3143 stype = 0
[1504]3144 else:
[1502]3145 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3146 if matchtype.lower().startswith("p"):
3147 matchtype = "pattern"
3148 elif matchtype.lower().startswith("r"):
3149 matchtype = "regex"
3150 else:
3151 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3152 sel = selector()
3153 if isinstance(match, selector):
3154 sel = match
3155 else:
[1504]3156 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3157 self.set_selection(basesel+sel)
3158 self._setsourcetype(stype)
3159 self.set_selection(basesel)
[1573]3160 self._add_history("set_sourcetype", varlist)
[1502]3161
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve: you can preserve (default) the continuum or
                      remove it. The equations used are

                      preserve: Output = Toff * (on/off) - Toff

                      remove:   Output = Toff * (on/off) - Ton

            mode:     the on/off detection mode
                      'paired' (default)
                      identifies 'off' scans by the
                      trailing '_R' (Mopra/Parkes) or
                      '_e'/'_w' (Tid) and matches
                      on/off pairs from the observing pattern
                      'time'
                      finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # temporarily narrow the selection to the off rows, copy
            # them, then do the same for the on rows; the original
            # selection is restored by the @preserve_selection decorator
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # on/off pairing resolved in C++ by closest time match
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]3210
[1862]3211 @asaplog_post_dec
[1145]3212 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3213 """\
[1143]3214 Form a quotient using "off" beams when observing in "MX" mode.
[1846]3215
[1143]3216 Parameters:
[1846]3217
[1145]3218 mask: an optional mask to be used when weight == 'stddev'
[1855]3219
[1143]3220 weight: How to average the off beams. Default is 'median'.
[1855]3221
[1145]3222 preserve: you can preserve (default) the continuum or
[1855]3223 remove it. The equations used are:
[1846]3224
[1855]3225 preserve: Output = Toff * (on/off) - Toff
3226
3227 remove: Output = Toff * (on/off) - Ton
3228
[1217]3229 """
[1593]3230 mask = mask or ()
[1141]3231 varlist = vars()
3232 on = scantable(self._math._mx_extract(self, 'on'))
[1143]3233 preoff = scantable(self._math._mx_extract(self, 'off'))
3234 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3235 from asapmath import quotient
[1145]3236 q = quotient(on, off, preserve)
[1143]3237 q._add_history("mx_quotient", varlist)
[1217]3238 return q
[513]3239
[1862]3240 @asaplog_post_dec
[718]3241 def freq_switch(self, insitu=None):
[1846]3242 """\
[718]3243 Apply frequency switching to the data.
[1846]3244
[718]3245 Parameters:
[1846]3246
[718]3247 insitu: if False a new scantable is returned.
3248 Otherwise, the swictching is done in-situ
3249 The default is taken from .asaprc (False)
[1846]3250
[718]3251 """
3252 if insitu is None: insitu = rcParams['insitu']
[876]3253 self._math._setinsitu(insitu)
[718]3254 varlist = vars()
[876]3255 s = scantable(self._math._freqswitch(self))
[1118]3256 s._add_history("freq_switch", varlist)
[1856]3257 if insitu:
3258 self._assign(s)
3259 else:
3260 return s
[718]3261
[1862]3262 @asaplog_post_dec
[780]3263 def recalc_azel(self):
[1846]3264 """Recalculate the azimuth and elevation for each position."""
[780]3265 varlist = vars()
[876]3266 self._recalcazel()
[780]3267 self._add_history("recalc_azel", varlist)
3268 return
3269
[1862]3270 @asaplog_post_dec
[513]3271 def __add__(self, other):
3272 varlist = vars()
3273 s = None
3274 if isinstance(other, scantable):
[1573]3275 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3276 elif isinstance(other, float):
[876]3277 s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3278 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3279 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3280 from asapmath import _array2dOp
3281 s = _array2dOp( self.copy(), other, "ADD", False )
3282 else:
3283 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3284 else:
[718]3285 raise TypeError("Other input is not a scantable or float value")
[513]3286 s._add_history("operator +", varlist)
3287 return s
3288
[1862]3289 @asaplog_post_dec
[513]3290 def __sub__(self, other):
3291 """
3292 implicit on all axes and on Tsys
3293 """
3294 varlist = vars()
3295 s = None
3296 if isinstance(other, scantable):
[1588]3297 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3298 elif isinstance(other, float):
[876]3299 s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3300 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3301 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3302 from asapmath import _array2dOp
3303 s = _array2dOp( self.copy(), other, "SUB", False )
3304 else:
[2277]3305 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3306 else:
[718]3307 raise TypeError("Other input is not a scantable or float value")
[513]3308 s._add_history("operator -", varlist)
3309 return s
[710]3310
[1862]3311 @asaplog_post_dec
[513]3312 def __mul__(self, other):
3313 """
3314 implicit on all axes and on Tsys
3315 """
3316 varlist = vars()
3317 s = None
3318 if isinstance(other, scantable):
[1588]3319 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3320 elif isinstance(other, float):
[876]3321 s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3322 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3323 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3324 from asapmath import _array2dOp
3325 s = _array2dOp( self.copy(), other, "MUL", False )
3326 else:
[2277]3327 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3328 else:
[718]3329 raise TypeError("Other input is not a scantable or float value")
[513]3330 s._add_history("operator *", varlist)
3331 return s
3332
[710]3333
[1862]3334 @asaplog_post_dec
[513]3335 def __div__(self, other):
3336 """
3337 implicit on all axes and on Tsys
3338 """
3339 varlist = vars()
3340 s = None
3341 if isinstance(other, scantable):
[1589]3342 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3343 elif isinstance(other, float):
3344 if other == 0.0:
[718]3345 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3346 s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3347 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3348 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3349 from asapmath import _array2dOp
3350 s = _array2dOp( self.copy(), other, "DIV", False )
3351 else:
[2277]3352 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3353 else:
[718]3354 raise TypeError("Other input is not a scantable or float value")
[513]3355 s._add_history("operator /", varlist)
3356 return s
3357
[1862]3358 @asaplog_post_dec
[530]3359 def get_fit(self, row=0):
[1846]3360 """\
[530]3361 Print or return the stored fits for a row in the scantable
[1846]3362
[530]3363 Parameters:
[1846]3364
[530]3365 row: the row which the fit has been applied to.
[1846]3366
[530]3367 """
3368 if row > self.nrow():
3369 return
[976]3370 from asap.asapfit import asapfit
[530]3371 fit = asapfit(self._getfit(row))
[1859]3372 asaplog.push( '%s' %(fit) )
3373 return fit.as_dict()
[530]3374
[1483]3375 def flag_nans(self):
[1846]3376 """\
[1483]3377 Utility function to flag NaN values in the scantable.
3378 """
3379 import numpy
3380 basesel = self.get_selection()
3381 for i in range(self.nrow()):
[1589]3382 sel = self.get_row_selector(i)
3383 self.set_selection(basesel+sel)
[1483]3384 nans = numpy.isnan(self._getspectrum(0))
3385 if numpy.any(nans):
3386 bnans = [ bool(v) for v in nans]
3387 self.flag(bnans)
3388 self.set_selection(basesel)
3389
[1588]3390 def get_row_selector(self, rowno):
[1992]3391 #return selector(beams=self.getbeam(rowno),
3392 # ifs=self.getif(rowno),
3393 # pols=self.getpol(rowno),
3394 # scans=self.getscan(rowno),
3395 # cycles=self.getcycle(rowno))
3396 return selector(rows=[rowno])
[1573]3397
    def _add_history(self, funcname, parameters):
        """\
        Append one '##'-separated history record for a call to
        'funcname' with the given 'parameters' (typically vars() of the
        caller) to the scantable's history. No-op when the
        'scantable.history' rc option is off.
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' carries no information for the history record
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # dict values are flattened into their own key=value pairs
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid embedding a full table dump in the record
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # masks are compacted to [start, stop) segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    # avoid embedding a full table dump in the record
                    hist += 'scantable'
                elif k == 'mask':
                    # masks are compacted to [start, stop) segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
3437
[710]3438
[484]3439 def _zip_mask(self, mask):
3440 mask = list(mask)
3441 i = 0
3442 segments = []
3443 while mask[i:].count(1):
3444 i += mask[i:].index(1)
3445 if mask[i:].count(0):
3446 j = i + mask[i:].index(0)
3447 else:
[710]3448 j = len(mask)
[1118]3449 segments.append([i, j])
[710]3450 i = j
[484]3451 return segments
[714]3452
[626]3453 def _get_ordinate_label(self):
3454 fu = "("+self.get_fluxunit()+")"
3455 import re
3456 lbl = "Intensity"
[1118]3457 if re.match(".K.", fu):
[626]3458 lbl = "Brightness Temperature "+ fu
[1118]3459 elif re.match(".Jy.", fu):
[626]3460 lbl = "Flux density "+ fu
3461 return lbl
[710]3462
[876]3463 def _check_ifs(self):
[1986]3464 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3465 nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3466 nchans = filter(lambda t: t > 0, nchans)
[876]3467 return (sum(nchans)/len(nchans) == nchans[0])
[976]3468
[1862]3469 @asaplog_post_dec
[1916]3470 #def _fill(self, names, unit, average, getpt, antenna):
3471 def _fill(self, names, unit, average, opts={}):
[976]3472 first = True
3473 fullnames = []
3474 for name in names:
3475 name = os.path.expandvars(name)
3476 name = os.path.expanduser(name)
3477 if not os.path.exists(name):
3478 msg = "File '%s' does not exists" % (name)
3479 raise IOError(msg)
3480 fullnames.append(name)
3481 if average:
3482 asaplog.push('Auto averaging integrations')
[1079]3483 stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3484 for name in fullnames:
[1073]3485 tbl = Scantable(stype)
[2004]3486 if is_ms( name ):
3487 r = msfiller( tbl )
3488 else:
3489 r = filler( tbl )
3490 rx = rcParams['scantable.reference']
3491 r.setreferenceexpr(rx)
3492 #r = filler(tbl)
3493 #rx = rcParams['scantable.reference']
3494 #r.setreferenceexpr(rx)
[976]3495 msg = "Importing %s..." % (name)
[1118]3496 asaplog.push(msg, False)
[1916]3497 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3498 r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3499 r.fill()
[976]3500 if average:
[1118]3501 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3502 if not first:
3503 tbl = self._math._merge([self, tbl])
3504 Scantable.__init__(self, tbl)
[1843]3505 r.close()
[1118]3506 del r, tbl
[976]3507 first = False
[1861]3508 #flush log
3509 asaplog.post()
[976]3510 if unit is not None:
3511 self.set_fluxunit(unit)
[1824]3512 if not is_casapy():
3513 self.set_freqframe(rcParams['scantable.freqframe'])
[976]3514
[2012]3515
[1402]3516 def __getitem__(self, key):
3517 if key < 0:
3518 key += self.nrow()
3519 if key >= self.nrow():
3520 raise IndexError("Row index out of range.")
3521 return self._getspectrum(key)
3522
3523 def __setitem__(self, key, value):
3524 if key < 0:
3525 key += self.nrow()
3526 if key >= self.nrow():
3527 raise IndexError("Row index out of range.")
3528 if not hasattr(value, "__len__") or \
3529 len(value) > self.nchan(self.getif(key)):
3530 raise ValueError("Spectrum length doesn't match.")
3531 return self._setspectrum(value, key)
3532
3533 def __len__(self):
3534 return self.nrow()
3535
3536 def __iter__(self):
3537 for i in range(len(self)):
3538 yield self[i]
Note: See TracBrowser for help on using the repository browser.