source: trunk/python/scantable.py@ 2753

Last change on this file since 2753 was 2753, checked in by Takeshi Nakazato, 12 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

More bug fix on is_scantable()


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 162.0 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            # only the first line ("Type = ...") is relevant
            l = f.readline()
        finally:
            # guarantee the descriptor is released even if readline fails
            f.close()
        # NOTE: the 'Scantable' token is deliberately optional — a bare
        # "Type = " line is also accepted (old tables may carry an empty
        # Type entry); anything else, e.g. "Type = Measurement Set",
        # is rejected.
        match_pattern = '^Type = (Scantable)? *$'
        return re.match(match_pattern, l) is not None
    else:
        return False
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        f = open(filename+'/table.info')
        try:
            # only the first line ("Type = ...") is relevant
            l = f.readline()
        finally:
            # guarantee the descriptor is released even if readline fails
            f.close()
        # substring test: any Type line mentioning 'Measurement Set' counts
        return 'Measurement Set' in l
    else:
        return False
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # raise via call syntax (consistent with the rest of this
                # module); the message no longer embeds the indentation of
                # a backslash-continued string literal
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different "
                             "IFs.")

    # normalise the scalar / short-list shorthands first
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a flat pair applies to all IFs; any nesting (or length > 2) means
    # per-IF values
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten, duplicating bare integers into [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a fitting failure: log it when verbose, otherwise re-raise
    it as a RuntimeError carrying the original error text.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Pack the progress options into a single 'true/false,N' string."""
    return '%s,%s' % (str(showprogress).lower(), minnrow)
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments, recorded in the history
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": make a reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing scantable, honouring the storage
                    # policy (disk vs memory) from the rc file
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna','freq_tolsr']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file: let the generic filler
                    # work out the format (rpfits/sdfits/...)
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
257
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as a casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-file default when no format is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the format suffix
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated mswriter
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats are handled by the generic stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
316
317 def copy(self):
318 """Return a copy of this scantable.
319
320 *Note*:
321
322 This makes a full (deep) copy. scan2 = scan1 makes a reference.
323
324 Example::
325
326 copiedscan = scan.copy()
327
328 """
329 sd = scantable(Scantable._copy(self))
330 return sd
331
332 def drop_scan(self, scanid=None):
333 """\
334 Return a new scantable where the specified scan number(s) has(have)
335 been dropped.
336
337 Parameters:
338
339 scanid: a (list of) scan number(s)
340
341 """
342 from asap import _is_sequence_or_number as _is_valid
343 from asap import _to_list
344 from asap import unique
345 if not _is_valid(scanid):
346 raise RuntimeError( 'Please specify a scanno to drop from the'
347 ' scantable' )
348 scanid = _to_list(scanid)
349 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
350 for sid in scanid: allscans.remove(sid)
351 if len(allscans) == 0:
352 raise ValueError("Can't remove all scans")
353 sel = selector(scans=allscans)
354 return self._select_copy(sel)
355
356 def _select_copy(self, selection):
357 orig = self.get_selection()
358 self.set_selection(orig+selection)
359 cp = self.copy()
360 self.set_selection(orig)
361 return cp
362
363 def get_scan(self, scanid=None):
364 """\
365 Return a specific scan (by scanno) or collection of scans (by
366 source name) in a new scantable.
367
368 *Note*:
369
370 See scantable.drop_scan() for the inverse operation.
371
372 Parameters:
373
374 scanid: a (list of) scanno or a source name, unix-style
375 patterns are accepted for source name matching, e.g.
376 '*_R' gets all 'ref scans
377
378 Example::
379
380 # get all scans containing the source '323p459'
381 newscan = scan.get_scan('323p459')
382 # get all 'off' scans
383 refscans = scan.get_scan('*_R')
384 # get a susbset of scans by scanno (as listed in scan.summary())
385 newscan = scan.get_scan([0, 2, 7, 10])
386
387 """
388 if scanid is None:
389 raise RuntimeError( 'Please specify a scan no or name to '
390 'retrieve from the scantable' )
391 try:
392 bsel = self.get_selection()
393 sel = selector()
394 if type(scanid) is str:
395 sel.set_name(scanid)
396 return self._select_copy(sel)
397 elif type(scanid) is int:
398 sel.set_scans([scanid])
399 return self._select_copy(sel)
400 elif type(scanid) is list:
401 sel.set_scans(scanid)
402 return self._select_copy(sel)
403 else:
404 msg = "Illegal scanid type, use 'int' or 'list' if ints."
405 raise TypeError(msg)
406 except RuntimeError:
407 raise
408
409 def __str__(self):
410 tempFile = tempfile.NamedTemporaryFile()
411 Scantable._summary(self, tempFile.name)
412 tempFile.seek(0)
413 asaplog.clear()
414 return tempFile.file.read()
415
416 @asaplog_post_dec
417 def summary(self, filename=None):
418 """\
419 Print a summary of the contents of this scantable.
420
421 Parameters:
422
423 filename: the name of a file to write the putput to
424 Default - no file output
425
426 """
427 if filename is not None:
428 if filename is "":
429 filename = 'scantable_summary.txt'
430 from os.path import expandvars, isdir
431 filename = expandvars(filename)
432 if isdir(filename):
433 msg = "Illegal file name '%s'." % (filename)
434 raise IOError(msg)
435 else:
436 filename = ""
437 Scantable._summary(self, filename)
438
439 def get_spectrum(self, rowno):
440 """Return the spectrum for the current row in the scantable as a list.
441
442 Parameters:
443
444 rowno: the row number to retrieve the spectrum from
445
446 """
447 return self._getspectrum(rowno)
448
449 def get_mask(self, rowno):
450 """Return the mask for the current row in the scantable as a list.
451
452 Parameters:
453
454 rowno: the row number to retrieve the mask from
455
456 """
457 return self._getmask(rowno)
458
459 def set_spectrum(self, spec, rowno):
460 """Set the spectrum for the current row in the scantable.
461
462 Parameters:
463
464 spec: the new spectrum
465
466 rowno: the row number to set the spectrum for
467
468 """
469 assert(len(spec) == self.nchan(self.getif(rowno)))
470 return self._setspectrum(spec, rowno)
471
472 def get_coordinate(self, rowno):
473 """Return the (spectral) coordinate for a a given 'rowno'.
474
475 *Note*:
476
477 * This coordinate is only valid until a scantable method modifies
478 the frequency axis.
479 * This coordinate does contain the original frequency set-up
480 NOT the new frame. The conversions however are done using the user
481 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
482 use scantable.freq_align first. Without it there is no closure,
483 i.e.::
484
485 c = myscan.get_coordinate(0)
486 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
487
488 Parameters:
489
490 rowno: the row number for the spectral coordinate
491
492 """
493 return coordinate(Scantable.get_coordinate(self, rowno))
494
495 def get_selection(self):
496 """\
497 Get the selection object currently set on this scantable.
498
499 Example::
500
501 sel = scan.get_selection()
502 sel.set_ifs(0) # select IF 0
503 scan.set_selection(sel) # apply modified selection
504
505 """
506 return selector(self._getselection())
507
508 def set_selection(self, selection=None, **kw):
509 """\
510 Select a subset of the data. All following operations on this scantable
511 are only applied to thi selection.
512
513 Parameters:
514
515 selection: a selector object (default unset the selection), or
516 any combination of 'pols', 'ifs', 'beams', 'scans',
517 'cycles', 'name', 'query'
518
519 Examples::
520
521 sel = selector() # create a selection object
522 self.set_scans([0, 3]) # select SCANNO 0 and 3
523 scan.set_selection(sel) # set the selection
524 scan.summary() # will only print summary of scanno 0 an 3
525 scan.set_selection() # unset the selection
526 # or the equivalent
527 scan.set_selection(scans=[0,3])
528 scan.summary() # will only print summary of scanno 0 an 3
529 scan.set_selection() # unset the selection
530
531 """
532 if selection is None:
533 # reset
534 if len(kw) == 0:
535 selection = selector()
536 else:
537 # try keywords
538 for k in kw:
539 if k not in selector.fields:
540 raise KeyError("Invalid selection key '%s', "
541 "valid keys are %s" % (k,
542 selector.fields))
543 selection = selector(**kw)
544 self._setselection(selection)
545
546 def get_row(self, row=0, insitu=None):
547 """\
548 Select a row in the scantable.
549 Return a scantable with single row.
550
551 Parameters:
552
553 row: row no of integration, default is 0.
554 insitu: if False a new scantable is returned. Otherwise, the
555 scaling is done in-situ. The default is taken from .asaprc
556 (False)
557
558 """
559 if insitu is None:
560 insitu = rcParams['insitu']
561 if not insitu:
562 workscan = self.copy()
563 else:
564 workscan = self
565 # Select a row
566 sel = selector()
567 sel.set_rows([row])
568 workscan.set_selection(sel)
569 if not workscan.nrow() == 1:
570 msg = "Could not identify single row. %d rows selected." \
571 % (workscan.nrow())
572 raise RuntimeError(msg)
573 if insitu:
574 self._assign(workscan)
575 else:
576 return workscan
577
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' statistics return the abcissa of the extremum instead of
        # the value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max statistics also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a textual report, one section per processed row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # for '_abc' stats the abcissa is the result itself;
                    # statvals is filled here rather than above
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the report through a per-user file in /tmp before pushing
        # it to the log (NOTE(review): fixed, predictable path in /tmp —
        # the round trip looks like a casapy logging workaround; confirm
        # before changing)
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
685
686 def chan2data(self, rowno=0, chan=0):
687 """\
688 Returns channel/frequency/velocity and spectral value
689 at an arbitrary row and channel in the scantable.
690
691 Parameters:
692
693 rowno: a row number in the scantable. Default is the
694 first row, i.e. rowno=0
695
696 chan: a channel in the scantable. Default is the first
697 channel, i.e. pos=0
698
699 """
700 if isinstance(rowno, int) and isinstance(chan, int):
701 qx = {'unit': self.get_unit(),
702 'value': self._getabcissa(rowno)[chan]}
703 qy = {'unit': self.get_fluxunit(),
704 'value': self._getspectrum(rowno)[chan]}
705 return qx, qy
706
707 def stddev(self, mask=None):
708 """\
709 Determine the standard deviation of the current beam/if/pol
710 Takes a 'mask' as an optional parameter to specify which
711 channels should be excluded.
712
713 Parameters:
714
715 mask: an optional mask specifying where the standard
716 deviation should be determined.
717
718 Example::
719
720 scan.set_unit('channel')
721 msk = scan.create_mask([100, 200], [500, 600])
722 scan.stddev(mask=m)
723
724 """
725 return self.stats(stat='stddev', mask=mask);
726
727
728 def get_column_names(self):
729 """\
730 Return a list of column names, which can be used for selection.
731 """
732 return list(Scantable.get_column_names(self))
733
734 def get_tsys(self, row=-1):
735 """\
736 Return the System temperatures.
737
738 Parameters:
739
740 row: the rowno to get the information for. (default all rows)
741
742 Returns:
743
744 a list of Tsys values for the current selection
745
746 """
747 if row > -1:
748 return self._get_column(self._gettsys, row)
749 return self._row_callback(self._gettsys, "Tsys")
750
751 def get_tsysspectrum(self, row=-1):
752 """\
753 Return the channel dependent system temperatures.
754
755 Parameters:
756
757 row: the rowno to get the information for. (default all rows)
758
759 Returns:
760
761 a list of Tsys values for the current selection
762
763 """
764 return self._get_column( self._gettsysspectrum, row )
765
766 def get_weather(self, row=-1):
767 """\
768 Return the weather informations.
769
770 Parameters:
771
772 row: the rowno to get the information for. (default all rows)
773
774 Returns:
775
776 a dict or list of of dicts of values for the current selection
777
778 """
779
780 values = self._get_column(self._get_weather, row)
781 if row > -1:
782 return {'temperature': values[0],
783 'pressure': values[1], 'humidity' : values[2],
784 'windspeed' : values[3], 'windaz' : values[4]
785 }
786 else:
787 out = []
788 for r in values:
789
790 out.append({'temperature': r[0],
791 'pressure': r[1], 'humidity' : r[2],
792 'windspeed' : r[3], 'windaz' : r[4]
793 })
794 return out
795
796 def _row_callback(self, callback, label):
797 out = ""
798 outvec = []
799 sep = '-'*50
800 for i in range(self.nrow()):
801 tm = self._gettime(i)
802 src = self._getsourcename(i)
803 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
804 out += 'Time[%s]:\n' % (tm)
805 if self.nbeam(-1) > 1:
806 out += ' Beam[%d] ' % (self.getbeam(i))
807 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
808 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
809 outvec.append(callback(i))
810 out += '= %3.3f\n' % (outvec[i])
811 out += sep+'\n'
812
813 asaplog.push(sep)
814 asaplog.push(" %s" % (label))
815 asaplog.push(sep)
816 asaplog.push(out)
817 asaplog.post()
818 return outvec
819
820 def _get_column(self, callback, row=-1, *args):
821 """
822 """
823 if row == -1:
824 return [callback(i, *args) for i in range(self.nrow())]
825 else:
826 if 0 <= row < self.nrow():
827 return callback(row, *args)
828
829
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the sub-second / seconds / minutes fields from the
            # parse format, working from the finest field outwards, to
            # match the truncation MVTime applied at this precision
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
875
876
877 def get_inttime(self, row=-1):
878 """\
879 Get a list of integration times for the observations.
880 Return a time in seconds for each integration in the scantable.
881
882 Parameters:
883
884 row: row no of integration. Default -1 return all rows.
885
886 """
887 return self._get_column(self._getinttime, row)
888
889
890 def get_sourcename(self, row=-1):
891 """\
892 Get a list source names for the observations.
893 Return a string for each integration in the scantable.
894 Parameters:
895
896 row: row no of integration. Default -1 return all rows.
897
898 """
899 return self._get_column(self._getsourcename, row)
900
901 def get_elevation(self, row=-1):
902 """\
903 Get a list of elevations for the observations.
904 Return a float for each integration in the scantable.
905
906 Parameters:
907
908 row: row no of integration. Default -1 return all rows.
909
910 """
911 return self._get_column(self._getelevation, row)
912
913 def get_azimuth(self, row=-1):
914 """\
915 Get a list of azimuths for the observations.
916 Return a float for each integration in the scantable.
917
918 Parameters:
919 row: row no of integration. Default -1 return all rows.
920
921 """
922 return self._get_column(self._getazimuth, row)
923
924 def get_parangle(self, row=-1):
925 """\
926 Get a list of parallactic angles for the observations.
927 Return a float for each integration in the scantable.
928
929 Parameters:
930
931 row: row no of integration. Default -1 return all rows.
932
933 """
934 return self._get_column(self._getparangle, row)
935
936 def get_direction(self, row=-1):
937 """
938 Get a list of Positions on the sky (direction) for the observations.
939 Return a string for each integration in the scantable.
940
941 Parameters:
942
943 row: row no of integration. Default -1 return all rows
944
945 """
946 return self._get_column(self._getdirection, row)
947
948 def get_directionval(self, row=-1):
949 """\
950 Get a list of Positions on the sky (direction) for the observations.
951 Return a float for each integration in the scantable.
952
953 Parameters:
954
955 row: row no of integration. Default -1 return all rows
956
957 """
958 return self._get_column(self._getdirectionvec, row)
959
960 @asaplog_post_dec
961 def set_unit(self, unit='channel'):
962 """\
963 Set the unit for all following operations on this scantable
964
965 Parameters:
966
967 unit: optional unit, default is 'channel'. Use one of '*Hz',
968 'km/s', 'channel' or equivalent ''
969
970 """
971 varlist = vars()
972 if unit in ['', 'pixel', 'channel']:
973 unit = ''
974 inf = list(self._getcoordinfo())
975 inf[0] = unit
976 self._setcoordinfo(inf)
977 self._add_history("set_unit", varlist)
978
979 @asaplog_post_dec
980 def set_instrument(self, instr):
981 """\
982 Set the instrument for subsequent processing.
983
984 Parameters:
985
986 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
987 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
988
989 """
990 self._setInstrument(instr)
991 self._add_history("set_instument", vars())
992
993 @asaplog_post_dec
994 def set_feedtype(self, feedtype):
995 """\
996 Overwrite the feed type, which might not be set correctly.
997
998 Parameters:
999
1000 feedtype: 'linear' or 'circular'
1001
1002 """
1003 self._setfeedtype(feedtype)
1004 self._add_history("set_feedtype", vars())
1005
1006 @asaplog_post_dec
1007 def set_doppler(self, doppler='RADIO'):
1008 """\
1009 Set the doppler for all following operations on this scantable.
1010
1011 Parameters:
1012
1013 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1014
1015 """
1016 varlist = vars()
1017 inf = list(self._getcoordinfo())
1018 inf[2] = doppler
1019 self._setcoordinfo(inf)
1020 self._add_history("set_doppler", vars())
1021
1022 @asaplog_post_dec
1023 def set_freqframe(self, frame=None):
1024 """\
1025 Set the frame type of the Spectral Axis.
1026
1027 Parameters:
1028
1029 frame: an optional frame type, default 'LSRK'. Valid frames are:
1030 'TOPO', 'LSRD', 'LSRK', 'BARY',
1031 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1032
1033 Example::
1034
1035 scan.set_freqframe('BARY')
1036
1037 """
1038 frame = frame or rcParams['scantable.freqframe']
1039 varlist = vars()
1040 # "REST" is not implemented in casacore
1041 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1042 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1043 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1044 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1045
1046 if frame in valid:
1047 inf = list(self._getcoordinfo())
1048 inf[1] = frame
1049 self._setcoordinfo(inf)
1050 self._add_history("set_freqframe", varlist)
1051 else:
1052 msg = "Please specify a valid freq type. Valid types are:\n", valid
1053 raise TypeError(msg)
1054
1055 @asaplog_post_dec
1056 def set_dirframe(self, frame=""):
1057 """\
1058 Set the frame type of the Direction on the sky.
1059
1060 Parameters:
1061
1062 frame: an optional frame type, default ''. Valid frames are:
1063 'J2000', 'B1950', 'GALACTIC'
1064
1065 Example:
1066
1067 scan.set_dirframe('GALACTIC')
1068
1069 """
1070 varlist = vars()
1071 Scantable.set_dirframe(self, frame)
1072 self._add_history("set_dirframe", varlist)
1073
1074 def get_unit(self):
1075 """\
1076 Get the default unit set in this scantable
1077
1078 Returns:
1079
1080 A unit string
1081
1082 """
1083 inf = self._getcoordinfo()
1084 unit = inf[0]
1085 if unit == '': unit = 'channel'
1086 return unit
1087
1088 @asaplog_post_dec
1089 def get_abcissa(self, rowno=0):
1090 """\
1091 Get the abcissa in the current coordinate setup for the currently
1092 selected Beam/IF/Pol
1093
1094 Parameters:
1095
1096 rowno: an optional row number in the scantable. Default is the
1097 first row, i.e. rowno=0
1098
1099 Returns:
1100
1101 The abcissa values and the format string (as a dictionary)
1102
1103 """
1104 abc = self._getabcissa(rowno)
1105 lbl = self._getabcissalabel(rowno)
1106 return abc, lbl
1107
1108 @asaplog_post_dec
1109 def flag(self, mask=None, unflag=False, row=-1):
1110 """\
1111 Flag the selected data using an optional channel mask.
1112
1113 Parameters:
1114
1115 mask: an optional channel mask, created with create_mask. Default
1116 (no mask) is all channels.
1117
1118 unflag: if True, unflag the data
1119
1120 row: an optional row number in the scantable.
1121 Default -1 flags all rows
1122
1123 """
1124 varlist = vars()
1125 mask = mask or []
1126 self._flag(row, mask, unflag)
1127 self._add_history("flag", varlist)
1128
1129 @asaplog_post_dec
1130 def flag_row(self, rows=None, unflag=False):
1131 """\
1132 Flag the selected data in row-based manner.
1133
1134 Parameters:
1135
1136 rows: list of row numbers to be flagged. Default is no row
1137 (must be explicitly specified to execute row-based
1138 flagging).
1139
1140 unflag: if True, unflag the data.
1141
1142 """
1143 varlist = vars()
1144 if rows is None:
1145 rows = []
1146 self._flag_row(rows, unflag)
1147 self._add_history("flag_row", varlist)
1148
1149 @asaplog_post_dec
1150 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1151 """\
1152 Flag the selected data outside a specified range (in channel-base)
1153
1154 Parameters:
1155
1156 uthres: upper threshold.
1157
1158 dthres: lower threshold
1159
1160 clipoutside: True for flagging data outside the range
1161 [dthres:uthres].
1162 False for flagging data inside the range.
1163
1164 unflag: if True, unflag the data.
1165
1166 """
1167 varlist = vars()
1168 self._clip(uthres, dthres, clipoutside, unflag)
1169 self._add_history("clip", varlist)
1170
1171 @asaplog_post_dec
1172 def lag_flag(self, start, end, unit="MHz", insitu=None):
1173 """\
1174 Flag the data in 'lag' space by providing a frequency to remove.
1175 Flagged data in the scantable get interpolated over the region.
1176 No taper is applied.
1177
1178 Parameters:
1179
1180 start: the start frequency (really a period within the
1181 bandwidth) or period to remove
1182
1183 end: the end frequency or period to remove
1184
1185 unit: the frequency unit (default 'MHz') or '' for
1186 explicit lag channels
1187
1188 *Notes*:
1189
1190 It is recommended to flag edges of the band or strong
1191 signals beforehand.
1192
1193 """
1194 if insitu is None: insitu = rcParams['insitu']
1195 self._math._setinsitu(insitu)
1196 varlist = vars()
1197 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1198 if not (unit == "" or base.has_key(unit)):
1199 raise ValueError("%s is not a valid unit." % unit)
1200 if unit == "":
1201 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1202 else:
1203 s = scantable(self._math._lag_flag(self, start*base[unit],
1204 end*base[unit], "frequency"))
1205 s._add_history("lag_flag", varlist)
1206 if insitu:
1207 self._assign(s)
1208 else:
1209 return s
1210
1211 @asaplog_post_dec
1212 def fft(self, rowno=None, mask=None, getrealimag=False):
1213 """\
1214 Apply FFT to the spectra.
1215 Flagged data in the scantable get interpolated over the region.
1216
1217 Parameters:
1218
1219 rowno: The row number(s) to be processed. int, list
1220 and tuple are accepted. By default (None), FFT
1221 is applied to the whole data.
1222
1223 mask: Auxiliary channel mask(s). Given as a boolean
1224 list, it is applied to all specified rows.
1225 A list of boolean lists can also be used to
1226 apply different masks. In the latter case, the
1227 length of 'mask' must be the same as that of
1228 'rowno'. The default is None.
1229
1230 getrealimag: If True, returns the real and imaginary part
1231 values of the complex results.
1232 If False (the default), returns the amplitude
1233 (absolute value) normalised with Ndata/2 and
1234 phase (argument, in unit of radian).
1235
1236 Returns:
1237
1238 A list of dictionaries containing the results for each spectrum.
1239 Each dictionary contains two values, the real and the imaginary
1240 parts when getrealimag = True, or the amplitude(absolute value)
1241 and the phase(argument) when getrealimag = False. The key for
1242 these values are 'real' and 'imag', or 'ampl' and 'phase',
1243 respectively.
1244 """
1245 if rowno is None:
1246 rowno = []
1247 if isinstance(rowno, int):
1248 rowno = [rowno]
1249 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1250 raise TypeError("The row number(s) must be int, list or tuple.")
1251 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1252
1253 usecommonmask = True
1254
1255 if mask is None:
1256 mask = []
1257 if isinstance(mask, list) or isinstance(mask, tuple):
1258 if len(mask) == 0:
1259 mask = [[]]
1260 else:
1261 if isinstance(mask[0], bool):
1262 if len(mask) != self.nchan(self.getif(rowno[0])):
1263 raise ValueError("The spectra and the mask have "
1264 "different length.")
1265 mask = [mask]
1266 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1267 usecommonmask = False
1268 if len(mask) != len(rowno):
1269 raise ValueError("When specifying masks for each "
1270 "spectrum, the numbers of them "
1271 "must be identical.")
1272 for i in xrange(mask):
1273 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1274 raise ValueError("The spectra and the mask have "
1275 "different length.")
1276 else:
1277 raise TypeError("The mask must be a boolean list or "
1278 "a list of boolean list.")
1279 else:
1280 raise TypeError("The mask must be a boolean list or a list of "
1281 "boolean list.")
1282
1283 res = []
1284
1285 imask = 0
1286 for whichrow in rowno:
1287 fspec = self._fft(whichrow, mask[imask], getrealimag)
1288 nspec = len(fspec)
1289
1290 i = 0
1291 v1 = []
1292 v2 = []
1293 reselem = {"real":[],"imag":[]} if getrealimag \
1294 else {"ampl":[],"phase":[]}
1295
1296 while (i < nspec):
1297 v1.append(fspec[i])
1298 v2.append(fspec[i+1])
1299 i += 2
1300
1301 if getrealimag:
1302 reselem["real"] += v1
1303 reselem["imag"] += v2
1304 else:
1305 reselem["ampl"] += v1
1306 reselem["phase"] += v2
1307
1308 res.append(reselem)
1309
1310 if not usecommonmask:
1311 imask += 1
1312
1313 return res
1314
1315 @asaplog_post_dec
1316 def create_mask(self, *args, **kwargs):
1317 """\
1318 Compute and return a mask based on [min, max] windows.
1319 The specified windows are to be INCLUDED, when the mask is
1320 applied.
1321
1322 Parameters:
1323
1324 [min, max], [min2, max2], ...
1325 Pairs of start/end points (inclusive)specifying the regions
1326 to be masked
1327
1328 invert: optional argument. If specified as True,
1329 return an inverted mask, i.e. the regions
1330 specified are EXCLUDED
1331
1332 row: create the mask using the specified row for
1333 unit conversions, default is row=0
1334 only necessary if frequency varies over rows.
1335
1336 Examples::
1337
1338 scan.set_unit('channel')
1339 # a)
1340 msk = scan.create_mask([400, 500], [800, 900])
1341 # masks everything outside 400 and 500
1342 # and 800 and 900 in the unit 'channel'
1343
1344 # b)
1345 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1346 # masks the regions between 400 and 500
1347 # and 800 and 900 in the unit 'channel'
1348
1349 # c)
1350 #mask only channel 400
1351 msk = scan.create_mask([400])
1352
1353 """
1354 row = kwargs.get("row", 0)
1355 data = self._getabcissa(row)
1356 u = self._getcoordinfo()[0]
1357 if u == "":
1358 u = "channel"
1359 msg = "The current mask window unit is %s" % u
1360 i = self._check_ifs()
1361 if not i:
1362 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1363 asaplog.push(msg)
1364 n = len(data)
1365 msk = _n_bools(n, False)
1366 # test if args is a 'list' or a 'normal *args - UGLY!!!
1367
1368 ws = (isinstance(args[-1][-1], int)
1369 or isinstance(args[-1][-1], float)) and args or args[0]
1370 for window in ws:
1371 if len(window) == 1:
1372 window = [window[0], window[0]]
1373 if len(window) == 0 or len(window) > 2:
1374 raise ValueError("A window needs to be defined as "
1375 "[start(, end)]")
1376 if window[0] > window[1]:
1377 tmp = window[0]
1378 window[0] = window[1]
1379 window[1] = tmp
1380 for i in range(n):
1381 if data[i] >= window[0] and data[i] <= window[1]:
1382 msk[i] = True
1383 if kwargs.has_key('invert'):
1384 if kwargs.get('invert'):
1385 msk = mask_not(msk)
1386 return msk
1387
1388 def get_masklist(self, mask=None, row=0, silent=False):
1389 """\
1390 Compute and return a list of mask windows, [min, max].
1391
1392 Parameters:
1393
1394 mask: channel mask, created with create_mask.
1395
1396 row: calcutate the masklist using the specified row
1397 for unit conversions, default is row=0
1398 only necessary if frequency varies over rows.
1399
1400 Returns:
1401
1402 [min, max], [min2, max2], ...
1403 Pairs of start/end points (inclusive)specifying
1404 the masked regions
1405
1406 """
1407 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1408 raise TypeError("The mask should be list or tuple.")
1409 if len(mask) <= 0:
1410 raise TypeError("The mask elements should be > 0")
1411 data = self._getabcissa(row)
1412 if len(data) != len(mask):
1413 msg = "Number of channels in scantable != number of mask elements"
1414 raise TypeError(msg)
1415 u = self._getcoordinfo()[0]
1416 if u == "":
1417 u = "channel"
1418 msg = "The current mask window unit is %s" % u
1419 i = self._check_ifs()
1420 if not i:
1421 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1422 if not silent:
1423 asaplog.push(msg)
1424 masklist = []
1425 ist, ien = None, None
1426 ist, ien=self.get_mask_indices(mask)
1427 if ist is not None and ien is not None:
1428 for i in xrange(len(ist)):
1429 range=[data[ist[i]],data[ien[i]]]
1430 range.sort()
1431 masklist.append([range[0],range[1]])
1432 return masklist
1433
1434 def get_mask_indices(self, mask=None):
1435 """\
1436 Compute and Return lists of mask start indices and mask end indices.
1437
1438 Parameters:
1439
1440 mask: channel mask, created with create_mask.
1441
1442 Returns:
1443
1444 List of mask start indices and that of mask end indices,
1445 i.e., [istart1,istart2,....], [iend1,iend2,....].
1446
1447 """
1448 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1449 raise TypeError("The mask should be list or tuple.")
1450 if len(mask) <= 0:
1451 raise TypeError("The mask elements should be > 0")
1452 istart = []
1453 iend = []
1454 if mask[0]:
1455 istart.append(0)
1456 for i in range(len(mask)-1):
1457 if not mask[i] and mask[i+1]:
1458 istart.append(i+1)
1459 elif mask[i] and not mask[i+1]:
1460 iend.append(i)
1461 if mask[len(mask)-1]:
1462 iend.append(len(mask)-1)
1463 if len(istart) != len(iend):
1464 raise RuntimeError("Numbers of mask start != mask end.")
1465 for i in range(len(istart)):
1466 if istart[i] > iend[i]:
1467 raise RuntimeError("Mask start index > mask end index")
1468 break
1469 return istart,iend
1470
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # scan the rows to find one belonging to this IF so
                        # the frequency coordinate of that row can be used
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None stands for 'whole spectrum of this spw'
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                        del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1604
1605 @asaplog_post_dec
1606 def parse_idx_selection(self, mode, selexpr):
1607 """
1608 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1609 BEAMNO, and row number
1610
1611 Parameters:
1612 mode : which column to select.
1613 ['scan',|'if'|'pol'|'beam'|'row']
1614 selexpr : A comma separated selection expression.
1615 examples:
1616 '' = all (returns [])
1617 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1618 (returns [0,1,4,5,6,9])
1619 Returns:
1620 A List of selected indices
1621 """
1622 if selexpr == "":
1623 return []
1624 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1625 'b': 'beam', 'r': 'row'}
1626 smode = mode.lower()[0]
1627 if not (smode in valid_modes.keys()):
1628 msg = "Invalid mode '%s'. Valid modes are %s" %\
1629 (mode, str(valid_modes.values()))
1630 asaplog.post()
1631 asaplog.push(msg)
1632 asaplog.post("ERROR")
1633 mode = valid_modes[smode]
1634 minidx = None
1635 maxidx = None
1636 if smode == 'r':
1637 minidx = 0
1638 maxidx = self.nrow()
1639 else:
1640 idx = getattr(self,"get"+mode+"nos")()
1641 minidx = min(idx)
1642 maxidx = max(idx)
1643 del idx
1644 sellist = selexpr.split(',')
1645 idxlist = []
1646 for currselstr in sellist:
1647 # single range (may include ~, < or >)
1648 currlist = self._parse_selection(currselstr, typestr='integer',
1649 minval=minidx,maxval=maxidx)
1650 for thelist in currlist:
1651 idxlist += range(thelist[0],thelist[1]+1)
1652 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1653 asaplog.push(msg)
1654 return idxlist
1655
1656 def _parse_selection(self, selstr, typestr='float', offset=0.,
1657 minval=None, maxval=None):
1658 """
1659 Parameters:
1660 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1661 typestr : The type of the values in returned list
1662 ('integer' or 'float')
1663 offset : The offset value to subtract from or add to
1664 the boundary value if the selection string
1665 includes '<' or '>' [Valid only for typestr='float']
1666 minval, maxval : The minimum/maximum values to set if the
1667 selection string includes '<' or '>'.
1668 The list element is filled with None by default.
1669 Returns:
1670 A list of min/max pair of selections.
1671 Example:
1672 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1673 --> returns [[0,2],[5,7],[9,9]]
1674 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1675 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
1676 """
1677 selgroups = selstr.split(';')
1678 sellists = []
1679 if typestr.lower().startswith('int'):
1680 formatfunc = int
1681 offset = 1
1682 else:
1683 formatfunc = float
1684
1685 for currsel in selgroups:
1686 if currsel.find('~') > 0:
1687 # val0 <= x <= val1
1688 minsel = formatfunc(currsel.split('~')[0].strip())
1689 maxsel = formatfunc(currsel.split('~')[1].strip())
1690 elif currsel.strip().find('<=') > -1:
1691 bound = currsel.split('<=')
1692 try: # try "x <= val"
1693 minsel = minval
1694 maxsel = formatfunc(bound[1].strip())
1695 except ValueError: # now "val <= x"
1696 minsel = formatfunc(bound[0].strip())
1697 maxsel = maxval
1698 elif currsel.strip().find('>=') > -1:
1699 bound = currsel.split('>=')
1700 try: # try "x >= val"
1701 minsel = formatfunc(bound[1].strip())
1702 maxsel = maxval
1703 except ValueError: # now "val >= x"
1704 minsel = minval
1705 maxsel = formatfunc(bound[0].strip())
1706 elif currsel.strip().find('<') > -1:
1707 bound = currsel.split('<')
1708 try: # try "x < val"
1709 minsel = minval
1710 maxsel = formatfunc(bound[1].strip()) \
1711 - formatfunc(offset)
1712 except ValueError: # now "val < x"
1713 minsel = formatfunc(bound[0].strip()) \
1714 + formatfunc(offset)
1715 maxsel = maxval
1716 elif currsel.strip().find('>') > -1:
1717 bound = currsel.split('>')
1718 try: # try "x > val"
1719 minsel = formatfunc(bound[1].strip()) \
1720 + formatfunc(offset)
1721 maxsel = maxval
1722 except ValueError: # now "val > x"
1723 minsel = minval
1724 maxsel = formatfunc(bound[0].strip()) \
1725 - formatfunc(offset)
1726 else:
1727 minsel = formatfunc(currsel)
1728 maxsel = formatfunc(currsel)
1729 sellists.append([minsel,maxsel])
1730 return sellists
1731
1732# def get_restfreqs(self):
1733# """
1734# Get the restfrequency(s) stored in this scantable.
1735# The return value(s) are always of unit 'Hz'
1736# Parameters:
1737# none
1738# Returns:
1739# a list of doubles
1740# """
1741# return list(self._getrestfreqs())
1742
1743 def get_restfreqs(self, ids=None):
1744 """\
1745 Get the restfrequency(s) stored in this scantable.
1746 The return value(s) are always of unit 'Hz'
1747
1748 Parameters:
1749
1750 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1751 be retrieved
1752
1753 Returns:
1754
1755 dictionary containing ids and a list of doubles for each id
1756
1757 """
1758 if ids is None:
1759 rfreqs = {}
1760 idlist = self.getmolnos()
1761 for i in idlist:
1762 rfreqs[i] = list(self._getrestfreqs(i))
1763 return rfreqs
1764 else:
1765 if type(ids) == list or type(ids) == tuple:
1766 rfreqs = {}
1767 for i in ids:
1768 rfreqs[i] = list(self._getrestfreqs(i))
1769 return rfreqs
1770 else:
1771 return list(self._getrestfreqs(ids))
1772
1773 @asaplog_post_dec
1774 def set_restfreqs(self, freqs=None, unit='Hz'):
1775 """\
1776 Set or replace the restfrequency specified and
1777 if the 'freqs' argument holds a scalar,
1778 then that rest frequency will be applied to all the selected
1779 data. If the 'freqs' argument holds
1780 a vector, then it MUST be of equal or smaller length than
1781 the number of IFs (and the available restfrequencies will be
1782 replaced by this vector). In this case, *all* data have
1783 the restfrequency set per IF according
1784 to the corresponding value you give in the 'freqs' vector.
1785 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1786 IF 1 gets restfreq 2e9.
1787
1788 You can also specify the frequencies via a linecatalog.
1789
1790 Parameters:
1791
1792 freqs: list of rest frequency values or string idenitfiers
1793
1794 unit: unit for rest frequency (default 'Hz')
1795
1796
1797 Example::
1798
1799 # set the given restfrequency for the all currently selected IFs
1800 scan.set_restfreqs(freqs=1.4e9)
1801 # set restfrequencies for the n IFs (n > 1) in the order of the
1802 # list, i.e
1803 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1804 # len(list_of_restfreqs) == nIF
1805 # for nIF == 1 the following will set multiple restfrequency for
1806 # that IF
1807 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1808 # set multiple restfrequencies per IF. as a list of lists where
1809 # the outer list has nIF elements, the inner s arbitrary
1810 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1811
1812 *Note*:
1813
1814 To do more sophisticate Restfrequency setting, e.g. on a
1815 source and IF basis, use scantable.set_selection() before using
1816 this function::
1817
1818 # provided your scantable is called scan
1819 selection = selector()
1820 selection.set_name('ORION*')
1821 selection.set_ifs([1])
1822 scan.set_selection(selection)
1823 scan.set_restfreqs(freqs=86.6e9)
1824
1825 """
1826 varlist = vars()
1827 from asap import linecatalog
1828 # simple value
1829 if isinstance(freqs, int) or isinstance(freqs, float):
1830 self._setrestfreqs([freqs], [""], unit)
1831 # list of values
1832 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1833 # list values are scalars
1834 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1835 if len(freqs) == 1:
1836 self._setrestfreqs(freqs, [""], unit)
1837 else:
1838 # allow the 'old' mode of setting mulitple IFs
1839 savesel = self._getselection()
1840 sel = self.get_selection()
1841 iflist = self.getifnos()
1842 if len(freqs)>len(iflist):
1843 raise ValueError("number of elements in list of list "
1844 "exeeds the current IF selections")
1845 iflist = self.getifnos()
1846 for i, fval in enumerate(freqs):
1847 sel.set_ifs(iflist[i])
1848 self._setselection(sel)
1849 self._setrestfreqs([fval], [""], unit)
1850 self._setselection(savesel)
1851
1852 # list values are dict, {'value'=, 'name'=)
1853 elif isinstance(freqs[-1], dict):
1854 values = []
1855 names = []
1856 for d in freqs:
1857 values.append(d["value"])
1858 names.append(d["name"])
1859 self._setrestfreqs(values, names, unit)
1860 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1861 savesel = self._getselection()
1862 sel = self.get_selection()
1863 iflist = self.getifnos()
1864 if len(freqs)>len(iflist):
1865 raise ValueError("number of elements in list of list exeeds"
1866 " the current IF selections")
1867 for i, fval in enumerate(freqs):
1868 sel.set_ifs(iflist[i])
1869 self._setselection(sel)
1870 self._setrestfreqs(fval, [""], unit)
1871 self._setselection(savesel)
1872 # freqs are to be taken from a linecatalog
1873 elif isinstance(freqs, linecatalog):
1874 savesel = self._getselection()
1875 sel = self.get_selection()
1876 for i in xrange(freqs.nrow()):
1877 sel.set_ifs(iflist[i])
1878 self._setselection(sel)
1879 self._setrestfreqs([freqs.get_frequency(i)],
1880 [freqs.get_name(i)], "MHz")
1881 # ensure that we are not iterating past nIF
1882 if i == self.nif()-1: break
1883 self._setselection(savesel)
1884 else:
1885 return
1886 self._add_history("set_restfreqs", varlist)
1887
1888 @asaplog_post_dec
1889 def shift_refpix(self, delta):
1890 """\
1891 Shift the reference pixel of the Spectra Coordinate by an
1892 integer amount.
1893
1894 Parameters:
1895
1896 delta: the amount to shift by
1897
1898 *Note*:
1899
1900 Be careful using this with broadband data.
1901
1902 """
1903 varlist = vars()
1904 Scantable.shift_refpix(self, delta)
1905 s._add_history("shift_refpix", varlist)
1906
1907 @asaplog_post_dec
1908 def history(self, filename=None):
1909 """\
1910 Print the history. Optionally to a file.
1911
1912 Parameters:
1913
1914 filename: The name of the file to save the history to.
1915
1916 """
1917 hist = list(self._gethistory())
1918 out = "-"*80
1919 for h in hist:
1920 if h.startswith("---"):
1921 out = "\n".join([out, h])
1922 else:
1923 items = h.split("##")
1924 date = items[0]
1925 func = items[1]
1926 items = items[2:]
1927 out += "\n"+date+"\n"
1928 out += "Function: %s\n Parameters:" % (func)
1929 for i in items:
1930 if i == '':
1931 continue
1932 s = i.split("=")
1933 out += "\n %s = %s" % (s[0], s[1])
1934 out = "\n".join([out, "-"*80])
1935 if filename is not None:
1936 if filename is "":
1937 filename = 'scantable_history.txt'
1938 import os
1939 filename = os.path.expandvars(os.path.expanduser(filename))
1940 if not os.path.isdir(filename):
1941 data = open(filename, 'w')
1942 data.write(out)
1943 data.close()
1944 else:
1945 msg = "Illegal file name '%s'." % (filename)
1946 raise IOError(msg)
1947 return page(out)
1948
1949 #
1950 # Maths business
1951 #
1952 @asaplog_post_dec
1953 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1954 """\
1955 Return the (time) weighted average of a scan. Scans will be averaged
1956 only if the source direction (RA/DEC) is within 1' otherwise
1957
1958 *Note*:
1959
1960 in channels only - align if necessary
1961
1962 Parameters:
1963
1964 mask: an optional mask (only used for 'var' and 'tsys'
1965 weighting)
1966
1967 scanav: True averages each scan separately
1968 False (default) averages all scans together,
1969
1970 weight: Weighting scheme.
1971 'none' (mean no weight)
1972 'var' (1/var(spec) weighted)
1973 'tsys' (1/Tsys**2 weighted)
1974 'tint' (integration time weighted)
1975 'tintsys' (Tint/Tsys**2)
1976 'median' ( median averaging)
1977 The default is 'tint'
1978
1979 align: align the spectra in velocity before averaging. It takes
1980 the time of the first spectrum as reference time.
1981
1982 Example::
1983
1984 # time average the scantable without using a mask
1985 newscan = scan.average_time()
1986
1987 """
1988 varlist = vars()
1989 weight = weight or 'TINT'
1990 mask = mask or ()
1991 scanav = (scanav and 'SCAN') or 'NONE'
1992 scan = (self, )
1993
1994 if align:
1995 scan = (self.freq_align(insitu=False), )
1996 s = None
1997 if weight.upper() == 'MEDIAN':
1998 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1999 scanav))
2000 else:
2001 s = scantable(self._math._average(scan, mask, weight.upper(),
2002 scanav))
2003 s._add_history("average_time", varlist)
2004 return s
2005
2006 @asaplog_post_dec
2007 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2008 """\
2009 Return a scan where all spectra are converted to either
2010 Jansky or Kelvin depending upon the flux units of the scan table.
2011 By default the function tries to look the values up internally.
2012 If it can't find them (or if you want to over-ride), you must
2013 specify EITHER jyperk OR eta (and D which it will try to look up
2014 also if you don't set it). jyperk takes precedence if you set both.
2015
2016 Parameters:
2017
2018 jyperk: the Jy / K conversion factor
2019
2020 eta: the aperture efficiency
2021
2022 d: the geometric diameter (metres)
2023
2024 insitu: if False a new scantable is returned.
2025 Otherwise, the scaling is done in-situ
2026 The default is taken from .asaprc (False)
2027
2028 """
2029 if insitu is None: insitu = rcParams['insitu']
2030 self._math._setinsitu(insitu)
2031 varlist = vars()
2032 jyperk = jyperk or -1.0
2033 d = d or -1.0
2034 eta = eta or -1.0
2035 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2036 s._add_history("convert_flux", varlist)
2037 if insitu: self._assign(s)
2038 else: return s
2039
2040 @asaplog_post_dec
2041 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2042 """\
2043 Return a scan after applying a gain-elevation correction.
2044 The correction can be made via either a polynomial or a
2045 table-based interpolation (and extrapolation if necessary).
2046 You specify polynomial coefficients, an ascii table or neither.
2047 If you specify neither, then a polynomial correction will be made
2048 with built in coefficients known for certain telescopes (an error
2049 will occur if the instrument is not known).
2050 The data and Tsys are *divided* by the scaling factors.
2051
2052 Parameters:
2053
2054 poly: Polynomial coefficients (default None) to compute a
2055 gain-elevation correction as a function of
2056 elevation (in degrees).
2057
2058 filename: The name of an ascii file holding correction factors.
2059 The first row of the ascii file must give the column
2060 names and these MUST include columns
2061 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2062 by this) somewhere.
2063 The second row must give the data type of the
2064 column. Use 'R' for Real and 'I' for Integer.
2065 An example file would be
2066 (actual factors are arbitrary) :
2067
2068 TIME ELEVATION FACTOR
2069 R R R
2070 0.1 0 0.8
2071 0.2 20 0.85
2072 0.3 40 0.9
2073 0.4 60 0.85
2074 0.5 80 0.8
2075 0.6 90 0.75
2076
2077 method: Interpolation method when correcting from a table.
2078 Values are 'nearest', 'linear' (default), 'cubic'
2079 and 'spline'
2080
2081 insitu: if False a new scantable is returned.
2082 Otherwise, the scaling is done in-situ
2083 The default is taken from .asaprc (False)
2084
2085 """
2086
2087 if insitu is None: insitu = rcParams['insitu']
2088 self._math._setinsitu(insitu)
2089 varlist = vars()
2090 poly = poly or ()
2091 from os.path import expandvars
2092 filename = expandvars(filename)
2093 s = scantable(self._math._gainel(self, poly, filename, method))
2094 s._add_history("gain_el", varlist)
2095 if insitu:
2096 self._assign(s)
2097 else:
2098 return s
2099
2100 @asaplog_post_dec
2101 def freq_align(self, reftime=None, method='cubic', insitu=None):
2102 """\
2103 Return a scan where all rows have been aligned in frequency/velocity.
2104 The alignment frequency frame (e.g. LSRK) is that set by function
2105 set_freqframe.
2106
2107 Parameters:
2108
2109 reftime: reference time to align at. By default, the time of
2110 the first row of data is used.
2111
2112 method: Interpolation method for regridding the spectra.
2113 Choose from 'nearest', 'linear', 'cubic' (default)
2114 and 'spline'
2115
2116 insitu: if False a new scantable is returned.
2117 Otherwise, the scaling is done in-situ
2118 The default is taken from .asaprc (False)
2119
2120 """
2121 if insitu is None: insitu = rcParams["insitu"]
2122 oldInsitu = self._math._insitu()
2123 self._math._setinsitu(insitu)
2124 varlist = vars()
2125 reftime = reftime or ""
2126 s = scantable(self._math._freq_align(self, reftime, method))
2127 s._add_history("freq_align", varlist)
2128 self._math._setinsitu(oldInsitu)
2129 if insitu:
2130 self._assign(s)
2131 else:
2132 return s
2133
2134 @asaplog_post_dec
2135 def opacity(self, tau=None, insitu=None):
2136 """\
2137 Apply an opacity correction. The data
2138 and Tsys are multiplied by the correction factor.
2139
2140 Parameters:
2141
2142 tau: (list of) opacity from which the correction factor is
2143 exp(tau*ZD)
2144 where ZD is the zenith-distance.
2145 If a list is provided, it has to be of length nIF,
2146 nIF*nPol or 1 and in order of IF/POL, e.g.
2147 [opif0pol0, opif0pol1, opif1pol0 ...]
2148 if tau is `None` the opacities are determined from a
2149 model.
2150
2151 insitu: if False a new scantable is returned.
2152 Otherwise, the scaling is done in-situ
2153 The default is taken from .asaprc (False)
2154
2155 """
2156 if insitu is None:
2157 insitu = rcParams['insitu']
2158 self._math._setinsitu(insitu)
2159 varlist = vars()
2160 if not hasattr(tau, "__len__"):
2161 tau = [tau]
2162 s = scantable(self._math._opacity(self, tau))
2163 s._add_history("opacity", varlist)
2164 if insitu:
2165 self._assign(s)
2166 else:
2167 return s
2168
2169 @asaplog_post_dec
2170 def bin(self, width=5, insitu=None):
2171 """\
2172 Return a scan where all spectra have been binned up.
2173
2174 Parameters:
2175
2176 width: The bin width (default=5) in pixels
2177
2178 insitu: if False a new scantable is returned.
2179 Otherwise, the scaling is done in-situ
2180 The default is taken from .asaprc (False)
2181
2182 """
2183 if insitu is None:
2184 insitu = rcParams['insitu']
2185 self._math._setinsitu(insitu)
2186 varlist = vars()
2187 s = scantable(self._math._bin(self, width))
2188 s._add_history("bin", varlist)
2189 if insitu:
2190 self._assign(s)
2191 else:
2192 return s
2193
2194 @asaplog_post_dec
2195 def reshape(self, first, last, insitu=None):
2196 """Resize the band by providing first and last channel.
2197 This will cut off all channels outside [first, last].
2198 """
2199 if insitu is None:
2200 insitu = rcParams['insitu']
2201 varlist = vars()
2202 if last < 0:
2203 last = self.nchan()-1 + last
2204 s = None
2205 if insitu:
2206 s = self
2207 else:
2208 s = self.copy()
2209 s._reshape(first,last)
2210 s._add_history("reshape", varlist)
2211 if not insitu:
2212 return s
2213
2214 @asaplog_post_dec
2215 def resample(self, width=5, method='cubic', insitu=None):
2216 """\
2217 Return a scan where all spectra have been binned up.
2218
2219 Parameters:
2220
2221 width: The bin width (default=5) in pixels
2222
2223 method: Interpolation method when correcting from a table.
2224 Values are 'nearest', 'linear', 'cubic' (default)
2225 and 'spline'
2226
2227 insitu: if False a new scantable is returned.
2228 Otherwise, the scaling is done in-situ
2229 The default is taken from .asaprc (False)
2230
2231 """
2232 if insitu is None:
2233 insitu = rcParams['insitu']
2234 self._math._setinsitu(insitu)
2235 varlist = vars()
2236 s = scantable(self._math._resample(self, method, width))
2237 s._add_history("resample", varlist)
2238 if insitu:
2239 self._assign(s)
2240 else:
2241 return s
2242
2243 @asaplog_post_dec
2244 def average_pol(self, mask=None, weight='none'):
2245 """\
2246 Average the Polarisations together.
2247
2248 Parameters:
2249
2250 mask: An optional mask defining the region, where the
2251 averaging will be applied. The output will have all
2252 specified points masked.
2253
2254 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2255 weighted), or 'tsys' (1/Tsys**2 weighted)
2256
2257 """
2258 varlist = vars()
2259 mask = mask or ()
2260 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2261 s._add_history("average_pol", varlist)
2262 return s
2263
2264 @asaplog_post_dec
2265 def average_beam(self, mask=None, weight='none'):
2266 """\
2267 Average the Beams together.
2268
2269 Parameters:
2270 mask: An optional mask defining the region, where the
2271 averaging will be applied. The output will have all
2272 specified points masked.
2273
2274 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2275 weighted), or 'tsys' (1/Tsys**2 weighted)
2276
2277 """
2278 varlist = vars()
2279 mask = mask or ()
2280 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2281 s._add_history("average_beam", varlist)
2282 return s
2283
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag:       Bool indicating whether to turn this on (True) or
                         off (False)

        """
        # snapshot the call arguments so the flag change is recorded in the
        # scantable history after delegating to the C++ layer
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
2298
2299 @asaplog_post_dec
2300 def convert_pol(self, poltype=None):
2301 """\
2302 Convert the data to a different polarisation type.
2303 Note that you will need cross-polarisation terms for most conversions.
2304
2305 Parameters:
2306
2307 poltype: The new polarisation type. Valid types are:
2308 'linear', 'circular', 'stokes' and 'linpol'
2309
2310 """
2311 varlist = vars()
2312 s = scantable(self._math._convertpol(self, poltype))
2313 s._add_history("convert_pol", varlist)
2314 return s
2315
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' carries the accumulated interactive choice into
        # _get_verify_action; once it becomes "N" all remaining rows are
        # rejected without plotting
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # overlay original and smoothed spectrum row by row and ask the
            # user to accept or reject each smoothing result
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # NOTE(review): 'action' is reset to None after row 0 --
                # presumably so the initial 'H' prompt mode only applies to
                # the first row; confirm against _get_verify_action()
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2408
2409 @asaplog_post_dec
2410 def regrid_channel(self, width=5, plot=False, insitu=None):
2411 """\
2412 Regrid the spectra by the specified channel width
2413
2414 Parameters:
2415
2416 width: The channel width (float) of regridded spectra
2417 in the current spectral unit.
2418
2419 plot: [NOT IMPLEMENTED YET]
2420 plot the original and the regridded spectra.
2421 In this each indivual fit has to be approved, by
2422 typing 'y' or 'n'
2423
2424 insitu: if False a new scantable is returned.
2425 Otherwise, the scaling is done in-situ
2426 The default is taken from .asaprc (False)
2427
2428 """
2429 if insitu is None: insitu = rcParams['insitu']
2430 varlist = vars()
2431
2432 if plot:
2433 asaplog.post()
2434 asaplog.push("Verification plot is not implemtnetd yet.")
2435 asaplog.post("WARN")
2436
2437 s = self.copy()
2438 s._regrid_specchan(width)
2439
2440 s._add_history("regrid_channel", varlist)
2441
2442# if plot:
2443# from asap.asapplotter import new_asaplot
2444# theplot = new_asaplot(rcParams['plotter.gui'])
2445# from matplotlib import rc as rcp
2446# rcp('lines', linewidth=1)
2447# theplot.set_panels()
2448# ylab=s._get_ordinate_label()
2449# #theplot.palette(0,["#777777","red"])
2450# for r in xrange(s.nrow()):
2451# xsm=s._getabcissa(r)
2452# ysm=s._getspectrum(r)
2453# xorg=orgscan._getabcissa(r)
2454# yorg=orgscan._getspectrum(r)
2455# theplot.clear()
2456# theplot.hold()
2457# theplot.set_axes('ylabel',ylab)
2458# theplot.set_axes('xlabel',s._getabcissalabel(r))
2459# theplot.set_axes('title',s._getsourcename(r))
2460# theplot.set_line(label='Original',color="#777777")
2461# theplot.plot(xorg,yorg)
2462# theplot.set_line(label='Smoothed',color="red")
2463# theplot.plot(xsm,ysm)
2464# ### Ugly part for legend
2465# for i in [0,1]:
2466# theplot.subplots[0]['lines'].append(
2467# [theplot.subplots[0]['axes'].lines[i]]
2468# )
2469# theplot.release()
2470# ### Ugly part for legend
2471# theplot.subplots[0]['lines']=[]
2472# res = raw_input("Accept smoothing ([y]/n): ")
2473# if res.upper() == 'N':
2474# s._setspectrum(yorg, r)
2475# theplot.quit()
2476# del theplot
2477# del orgscan
2478
2479 if insitu: self._assign(s)
2480 else: return s
2481
2482 @asaplog_post_dec
2483 def _parse_wn(self, wn):
2484 if isinstance(wn, list) or isinstance(wn, tuple):
2485 return wn
2486 elif isinstance(wn, int):
2487 return [ wn ]
2488 elif isinstance(wn, str):
2489 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2490 val = wn.split('-')
2491 val = [int(val[0]), int(val[1])]
2492 val.sort()
2493 res = [i for i in xrange(val[0], val[1]+1)]
2494 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2495 val = int(wn[2:])+1
2496 res = [i for i in xrange(val)]
2497 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2498 val = int(wn[:-2])+1
2499 res = [i for i in xrange(val)]
2500 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2501 val = int(wn[1:])
2502 res = [i for i in xrange(val)]
2503 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2504 val = int(wn[:-1])
2505 res = [i for i in xrange(val)]
2506 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2507 # then interpreted in C++
2508 # side as [a,a+1,...,a_nyq]
2509 # (CAS-3759)
2510 val = int(wn[2:])
2511 res = [val, -999]
2512 #res = [i for i in xrange(val, self.nchan()/2+1)]
2513 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2514 # then interpreted in C++
2515 # side as [a,a+1,...,a_nyq]
2516 # (CAS-3759)
2517 val = int(wn[:-2])
2518 res = [val, -999]
2519 #res = [i for i in xrange(val, self.nchan()/2+1)]
2520 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2521 # then interpreted in C++
2522 # side as [a+1,a+2,...,a_nyq]
2523 # (CAS-3759)
2524 val = int(wn[1:])+1
2525 res = [val, -999]
2526 #res = [i for i in xrange(val, self.nchan()/2+1)]
2527 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2528 # then interpreted in C++
2529 # side as [a+1,a+2,...,a_nyq]
2530 # (CAS-3759)
2531 val = int(wn[:-1])+1
2532 res = [val, -999]
2533 #res = [i for i in xrange(val, self.nchan()/2+1)]
2534
2535 return res
2536 else:
2537 msg = 'wrong value given for addwn/rejwn'
2538 raise RuntimeError(msg)
2539
2540 @asaplog_post_dec
2541 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2542 whichrow=None, uselinefinder=None, edge=None,
2543 threshold=None, chan_avg_limit=None):
2544 """\
2545 Calculates and returns model selection criteria for a specified
2546 baseline model and a given spectrum data.
2547 Available values include Akaike Information Criterion (AIC), the
2548 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2549 Bayesian Information Criterion (BIC) and the Generalised Cross
2550 Validation (GCV).
2551
2552 Parameters:
2553 value: name of model selection criteria to calculate.
2554 available ones include 'aic', 'aicc', 'bic' and
2555 'gcv'. default is 'aicc'.
2556 blfunc: baseline function name. available ones include
2557 'chebyshev', 'cspline' and 'sinusoid'.
2558 default is 'chebyshev'.
2559 order: parameter for basline function. actually stands for
2560 order of polynomial (order) for 'chebyshev',
2561 number of spline pieces (npiece) for 'cspline' and
2562 maximum wave number for 'sinusoid', respectively.
2563 default is 5 (which is also the default order value
2564 for [auto_]chebyshev_baseline()).
2565 mask: an optional mask. default is [].
2566 whichrow: row number. default is 0 (the first row)
2567 uselinefinder: use sd.linefinder() to flag out line regions
2568 default is True.
2569 edge: an optional number of channel to drop at
2570 the edge of spectrum. If only one value is
2571 specified, the same number will be dropped
2572 from both sides of the spectrum. Default
2573 is to keep all channels. Nested tuples
2574 represent individual edge selection for
2575 different IFs (a number of spectral channels
2576 can be different)
2577 default is (0, 0).
2578 threshold: the threshold used by line finder. It is
2579 better to keep it large as only strong lines
2580 affect the baseline solution.
2581 default is 3.
2582 chan_avg_limit: a maximum number of consequtive spectral
2583 channels to average during the search of
2584 weak and broad lines. The default is no
2585 averaging (and no search for weak lines).
2586 If such lines can affect the fitted baseline
2587 (e.g. a high order polynomial is fitted),
2588 increase this parameter (usually values up
2589 to 8 are reasonable). Most users of this
2590 method should find the default value sufficient.
2591 default is 1.
2592
2593 Example:
2594 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2595 """
2596
2597 try:
2598 varlist = vars()
2599
2600 if value is None: value = 'aicc'
2601 if blfunc is None: blfunc = 'chebyshev'
2602 if order is None: order = 5
2603 if mask is None: mask = []
2604 if whichrow is None: whichrow = 0
2605 if uselinefinder is None: uselinefinder = True
2606 if edge is None: edge = (0, 0)
2607 if threshold is None: threshold = 3
2608 if chan_avg_limit is None: chan_avg_limit = 1
2609
2610 return self._calc_aic(value, blfunc, order, mask,
2611 whichrow, uselinefinder, edge,
2612 threshold, chan_avg_limit)
2613
2614 except RuntimeError, e:
2615 raise_fitting_failure_exception(e)
2616
2617 @asaplog_post_dec
2618 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2619 fftmethod=None, fftthresh=None,
2620 addwn=None, rejwn=None, clipthresh=None,
2621 clipniter=None, plot=None,
2622 getresidual=None, showprogress=None,
2623 minnrow=None, outlog=None, blfile=None, csvformat=None):
2624 """\
2625 Return a scan which has been baselined (all rows) with sinusoidal
2626 functions.
2627
2628 Parameters:
2629 insitu: if False a new scantable is returned.
2630 Otherwise, the scaling is done in-situ
2631 The default is taken from .asaprc (False)
2632 mask: an optional mask
2633 applyfft: if True use some method, such as FFT, to find
2634 strongest sinusoidal components in the wavenumber
2635 domain to be used for baseline fitting.
2636 default is True.
2637 fftmethod: method to find the strong sinusoidal components.
2638 now only 'fft' is available and it is the default.
2639 fftthresh: the threshold to select wave numbers to be used for
2640 fitting from the distribution of amplitudes in the
2641 wavenumber domain.
2642 both float and string values accepted.
2643 given a float value, the unit is set to sigma.
2644 for string values, allowed formats include:
2645 'xsigma' or 'x' (= x-sigma level. e.g.,
2646 '3sigma'), or
2647 'topx' (= the x strongest ones, e.g. 'top5').
2648 default is 3.0 (unit: sigma).
2649 addwn: the additional wave numbers to be used for fitting.
2650 list or integer value is accepted to specify every
2651 wave numbers. also string value can be used in case
2652 you need to specify wave numbers in a certain range,
2653 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2654 '<a' (= 0,1,...,a-2,a-1),
2655 '>=a' (= a, a+1, ... up to the maximum wave
2656 number corresponding to the Nyquist
2657 frequency for the case of FFT).
2658 default is [0].
2659 rejwn: the wave numbers NOT to be used for fitting.
2660 can be set just as addwn but has higher priority:
2661 wave numbers which are specified both in addwn
2662 and rejwn will NOT be used. default is [].
2663 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2664 clipniter: maximum number of iteration of 'clipthresh'-sigma
2665 clipping (default is 0)
2666 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2667 plot the fit and the residual. In this each
2668 indivual fit has to be approved, by typing 'y'
2669 or 'n'
2670 getresidual: if False, returns best-fit values instead of
2671 residual. (default is True)
2672 showprogress: show progress status for large data.
2673 default is True.
2674 minnrow: minimum number of input spectra to show.
2675 default is 1000.
2676 outlog: Output the coefficients of the best-fit
2677 function to logger (default is False)
2678 blfile: Name of a text file in which the best-fit
2679 parameter values to be written
2680 (default is '': no file/logger output)
2681 csvformat: if True blfile is csv-formatted, default is False.
2682
2683 Example:
2684 # return a scan baselined by a combination of sinusoidal curves
2685 # having wave numbers in spectral window up to 10,
2686 # also with 3-sigma clipping, iteration up to 4 times
2687 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2688
2689 Note:
2690 The best-fit parameter values output in logger and/or blfile are now
2691 based on specunit of 'channel'.
2692 """
2693
2694 try:
2695 varlist = vars()
2696
2697 if insitu is None: insitu = rcParams['insitu']
2698 if insitu:
2699 workscan = self
2700 else:
2701 workscan = self.copy()
2702
2703 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2704 if mask is None: mask = []
2705 if applyfft is None: applyfft = True
2706 if fftmethod is None: fftmethod = 'fft'
2707 if fftthresh is None: fftthresh = 3.0
2708 if addwn is None: addwn = [0]
2709 if rejwn is None: rejwn = []
2710 if clipthresh is None: clipthresh = 3.0
2711 if clipniter is None: clipniter = 0
2712 if plot is None: plot = False
2713 if getresidual is None: getresidual = True
2714 if showprogress is None: showprogress = True
2715 if minnrow is None: minnrow = 1000
2716 if outlog is None: outlog = False
2717 if blfile is None: blfile = ''
2718 if csvformat is None: csvformat = False
2719
2720 if csvformat:
2721 scsvformat = "T"
2722 else:
2723 scsvformat = "F"
2724
2725 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2726 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2727 str(fftthresh).lower(),
2728 workscan._parse_wn(addwn),
2729 workscan._parse_wn(rejwn),
2730 clipthresh, clipniter,
2731 getresidual,
2732 pack_progress_params(showprogress,
2733 minnrow),
2734 outlog, scsvformat+blfile)
2735 workscan._add_history('sinusoid_baseline', varlist)
2736
2737 if insitu:
2738 self._assign(workscan)
2739 else:
2740 return workscan
2741
2742 except RuntimeError, e:
2743 raise_fitting_failure_exception(e)
2744
2745
2746 @asaplog_post_dec
2747 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2748 fftmethod=None, fftthresh=None,
2749 addwn=None, rejwn=None, clipthresh=None,
2750 clipniter=None, edge=None, threshold=None,
2751 chan_avg_limit=None, plot=None,
2752 getresidual=None, showprogress=None,
2753 minnrow=None, outlog=None, blfile=None, csvformat=None):
2754 """\
2755 Return a scan which has been baselined (all rows) with sinusoidal
2756 functions.
2757 Spectral lines are detected first using linefinder and masked out
2758 to avoid them affecting the baseline solution.
2759
2760 Parameters:
2761 insitu: if False a new scantable is returned.
2762 Otherwise, the scaling is done in-situ
2763 The default is taken from .asaprc (False)
2764 mask: an optional mask retreived from scantable
2765 applyfft: if True use some method, such as FFT, to find
2766 strongest sinusoidal components in the wavenumber
2767 domain to be used for baseline fitting.
2768 default is True.
2769 fftmethod: method to find the strong sinusoidal components.
2770 now only 'fft' is available and it is the default.
2771 fftthresh: the threshold to select wave numbers to be used for
2772 fitting from the distribution of amplitudes in the
2773 wavenumber domain.
2774 both float and string values accepted.
2775 given a float value, the unit is set to sigma.
2776 for string values, allowed formats include:
2777 'xsigma' or 'x' (= x-sigma level. e.g.,
2778 '3sigma'), or
2779 'topx' (= the x strongest ones, e.g. 'top5').
2780 default is 3.0 (unit: sigma).
2781 addwn: the additional wave numbers to be used for fitting.
2782 list or integer value is accepted to specify every
2783 wave numbers. also string value can be used in case
2784 you need to specify wave numbers in a certain range,
2785 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2786 '<a' (= 0,1,...,a-2,a-1),
2787 '>=a' (= a, a+1, ... up to the maximum wave
2788 number corresponding to the Nyquist
2789 frequency for the case of FFT).
2790 default is [0].
2791 rejwn: the wave numbers NOT to be used for fitting.
2792 can be set just as addwn but has higher priority:
2793 wave numbers which are specified both in addwn
2794 and rejwn will NOT be used. default is [].
2795 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2796 clipniter: maximum number of iteration of 'clipthresh'-sigma
2797 clipping (default is 0)
2798 edge: an optional number of channel to drop at
2799 the edge of spectrum. If only one value is
2800 specified, the same number will be dropped
2801 from both sides of the spectrum. Default
2802 is to keep all channels. Nested tuples
2803 represent individual edge selection for
2804 different IFs (a number of spectral channels
2805 can be different)
2806 threshold: the threshold used by line finder. It is
2807 better to keep it large as only strong lines
2808 affect the baseline solution.
2809 chan_avg_limit: a maximum number of consequtive spectral
2810 channels to average during the search of
2811 weak and broad lines. The default is no
2812 averaging (and no search for weak lines).
2813 If such lines can affect the fitted baseline
2814 (e.g. a high order polynomial is fitted),
2815 increase this parameter (usually values up
2816 to 8 are reasonable). Most users of this
2817 method should find the default value sufficient.
2818 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2819 plot the fit and the residual. In this each
2820 indivual fit has to be approved, by typing 'y'
2821 or 'n'
2822 getresidual: if False, returns best-fit values instead of
2823 residual. (default is True)
2824 showprogress: show progress status for large data.
2825 default is True.
2826 minnrow: minimum number of input spectra to show.
2827 default is 1000.
2828 outlog: Output the coefficients of the best-fit
2829 function to logger (default is False)
2830 blfile: Name of a text file in which the best-fit
2831 parameter values to be written
2832 (default is "": no file/logger output)
2833 csvformat: if True blfile is csv-formatted, default is False.
2834
2835 Example:
2836 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2837
2838 Note:
2839 The best-fit parameter values output in logger and/or blfile are now
2840 based on specunit of 'channel'.
2841 """
2842
2843 try:
2844 varlist = vars()
2845
2846 if insitu is None: insitu = rcParams['insitu']
2847 if insitu:
2848 workscan = self
2849 else:
2850 workscan = self.copy()
2851
2852 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2853 if mask is None: mask = []
2854 if applyfft is None: applyfft = True
2855 if fftmethod is None: fftmethod = 'fft'
2856 if fftthresh is None: fftthresh = 3.0
2857 if addwn is None: addwn = [0]
2858 if rejwn is None: rejwn = []
2859 if clipthresh is None: clipthresh = 3.0
2860 if clipniter is None: clipniter = 0
2861 if edge is None: edge = (0,0)
2862 if threshold is None: threshold = 3
2863 if chan_avg_limit is None: chan_avg_limit = 1
2864 if plot is None: plot = False
2865 if getresidual is None: getresidual = True
2866 if showprogress is None: showprogress = True
2867 if minnrow is None: minnrow = 1000
2868 if outlog is None: outlog = False
2869 if blfile is None: blfile = ''
2870 if csvformat is None: csvformat = False
2871
2872 if csvformat:
2873 scsvformat = "T"
2874 else:
2875 scsvformat = "F"
2876
2877 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2878 workscan._auto_sinusoid_baseline(mask, applyfft,
2879 fftmethod.lower(),
2880 str(fftthresh).lower(),
2881 workscan._parse_wn(addwn),
2882 workscan._parse_wn(rejwn),
2883 clipthresh, clipniter,
2884 normalise_edge_param(edge),
2885 threshold, chan_avg_limit,
2886 getresidual,
2887 pack_progress_params(showprogress,
2888 minnrow),
2889 outlog, scsvformat+blfile)
2890 workscan._add_history("auto_sinusoid_baseline", varlist)
2891
2892 if insitu:
2893 self._assign(workscan)
2894 else:
2895 return workscan
2896
2897 except RuntimeError, e:
2898 raise_fitting_failure_exception(e)
2899
2900 @asaplog_post_dec
2901 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2902 clipthresh=None, clipniter=None, plot=None,
2903 getresidual=None, showprogress=None, minnrow=None,
2904 outlog=None, blfile=None, csvformat=None):
2905 """\
2906 Return a scan which has been baselined (all rows) by cubic spline
2907 function (piecewise cubic polynomial).
2908
2909 Parameters:
2910 insitu: If False a new scantable is returned.
2911 Otherwise, the scaling is done in-situ
2912 The default is taken from .asaprc (False)
2913 mask: An optional mask
2914 npiece: Number of pieces. (default is 2)
2915 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2916 clipniter: maximum number of iteration of 'clipthresh'-sigma
2917 clipping (default is 0)
2918 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2919 plot the fit and the residual. In this each
2920 indivual fit has to be approved, by typing 'y'
2921 or 'n'
2922 getresidual: if False, returns best-fit values instead of
2923 residual. (default is True)
2924 showprogress: show progress status for large data.
2925 default is True.
2926 minnrow: minimum number of input spectra to show.
2927 default is 1000.
2928 outlog: Output the coefficients of the best-fit
2929 function to logger (default is False)
2930 blfile: Name of a text file in which the best-fit
2931 parameter values to be written
2932 (default is "": no file/logger output)
2933 csvformat: if True blfile is csv-formatted, default is False.
2934
2935 Example:
2936 # return a scan baselined by a cubic spline consisting of 2 pieces
2937 # (i.e., 1 internal knot),
2938 # also with 3-sigma clipping, iteration up to 4 times
2939 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2940
2941 Note:
2942 The best-fit parameter values output in logger and/or blfile are now
2943 based on specunit of 'channel'.
2944 """
2945
2946 try:
2947 varlist = vars()
2948
2949 if insitu is None: insitu = rcParams['insitu']
2950 if insitu:
2951 workscan = self
2952 else:
2953 workscan = self.copy()
2954
2955 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2956 if mask is None: mask = []
2957 if npiece is None: npiece = 2
2958 if clipthresh is None: clipthresh = 3.0
2959 if clipniter is None: clipniter = 0
2960 if plot is None: plot = False
2961 if getresidual is None: getresidual = True
2962 if showprogress is None: showprogress = True
2963 if minnrow is None: minnrow = 1000
2964 if outlog is None: outlog = False
2965 if blfile is None: blfile = ''
2966 if csvformat is None: csvformat = False
2967
2968 if csvformat:
2969 scsvformat = "T"
2970 else:
2971 scsvformat = "F"
2972
2973 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2974 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2975 getresidual,
2976 pack_progress_params(showprogress,
2977 minnrow),
2978 outlog, scsvformat+blfile)
2979 workscan._add_history("cspline_baseline", varlist)
2980
2981 if insitu:
2982 self._assign(workscan)
2983 else:
2984 return workscan
2985
2986 except RuntimeError, e:
2987 raise_fitting_failure_exception(e)
2988
2989 @asaplog_post_dec
2990 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2991 clipthresh=None, clipniter=None,
2992 edge=None, threshold=None, chan_avg_limit=None,
2993 getresidual=None, plot=None,
2994 showprogress=None, minnrow=None, outlog=None,
2995 blfile=None, csvformat=None):
2996 """\
2997 Return a scan which has been baselined (all rows) by cubic spline
2998 function (piecewise cubic polynomial).
2999 Spectral lines are detected first using linefinder and masked out
3000 to avoid them affecting the baseline solution.
3001
3002 Parameters:
3003 insitu: if False a new scantable is returned.
3004 Otherwise, the scaling is done in-situ
3005 The default is taken from .asaprc (False)
3006 mask: an optional mask retreived from scantable
3007 npiece: Number of pieces. (default is 2)
3008 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3009 clipniter: maximum number of iteration of 'clipthresh'-sigma
3010 clipping (default is 0)
3011 edge: an optional number of channel to drop at
3012 the edge of spectrum. If only one value is
3013 specified, the same number will be dropped
3014 from both sides of the spectrum. Default
3015 is to keep all channels. Nested tuples
3016 represent individual edge selection for
3017 different IFs (a number of spectral channels
3018 can be different)
3019 threshold: the threshold used by line finder. It is
3020 better to keep it large as only strong lines
3021 affect the baseline solution.
3022 chan_avg_limit: a maximum number of consequtive spectral
3023 channels to average during the search of
3024 weak and broad lines. The default is no
3025 averaging (and no search for weak lines).
3026 If such lines can affect the fitted baseline
3027 (e.g. a high order polynomial is fitted),
3028 increase this parameter (usually values up
3029 to 8 are reasonable). Most users of this
3030 method should find the default value sufficient.
3031 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3032 plot the fit and the residual. In this each
3033 indivual fit has to be approved, by typing 'y'
3034 or 'n'
3035 getresidual: if False, returns best-fit values instead of
3036 residual. (default is True)
3037 showprogress: show progress status for large data.
3038 default is True.
3039 minnrow: minimum number of input spectra to show.
3040 default is 1000.
3041 outlog: Output the coefficients of the best-fit
3042 function to logger (default is False)
3043 blfile: Name of a text file in which the best-fit
3044 parameter values to be written
3045 (default is "": no file/logger output)
3046 csvformat: if True blfile is csv-formatted, default is False.
3047
3048 Example:
3049 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3050
3051 Note:
3052 The best-fit parameter values output in logger and/or blfile are now
3053 based on specunit of 'channel'.
3054 """
3055
3056 try:
3057 varlist = vars()
3058
3059 if insitu is None: insitu = rcParams['insitu']
3060 if insitu:
3061 workscan = self
3062 else:
3063 workscan = self.copy()
3064
3065 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3066 if mask is None: mask = []
3067 if npiece is None: npiece = 2
3068 if clipthresh is None: clipthresh = 3.0
3069 if clipniter is None: clipniter = 0
3070 if edge is None: edge = (0, 0)
3071 if threshold is None: threshold = 3
3072 if chan_avg_limit is None: chan_avg_limit = 1
3073 if plot is None: plot = False
3074 if getresidual is None: getresidual = True
3075 if showprogress is None: showprogress = True
3076 if minnrow is None: minnrow = 1000
3077 if outlog is None: outlog = False
3078 if blfile is None: blfile = ''
3079 if csvformat is None: csvformat = False
3080
3081 if csvformat:
3082 scsvformat = "T"
3083 else:
3084 scsvformat = "F"
3085
3086 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3087 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
3088 clipniter,
3089 normalise_edge_param(edge),
3090 threshold,
3091 chan_avg_limit, getresidual,
3092 pack_progress_params(showprogress,
3093 minnrow),
3094 outlog, scsvformat+blfile)
3095 workscan._add_history("auto_cspline_baseline", varlist)
3096
3097 if insitu:
3098 self._assign(workscan)
3099 else:
3100 return workscan
3101
3102 except RuntimeError, e:
3103 raise_fitting_failure_exception(e)
3104
    @asaplog_post_dec
    def chebyshev_baseline(self, insitu=None, mask=None, order=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:         *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by Chebyshev polynomials of order up to 5,
            # with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the C++ layer takes the csv flag as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order, clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile)
            workscan._add_history("chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3192
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the C++ layer takes the csv flag as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order, clipthresh,
                                              clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3307
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""
            if csvformat is None: csvformat = False

            # the C++ layer takes the csv flag as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            if plot:
                # interactive path: fit row by row in Python so each fit
                # can be shown and accepted/rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" forces the prompt on the first row,
                # "Y" (set by answering 'a') accepts all remaining rows
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            # rejected: leave this row's spectrum untouched
                            continue
                        elif accept_fit.upper() == "R":
                            # reject all remaining rows and stop
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3445
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the C++ layer takes the csv flag as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # flatten 'edge' into per-IF (left, right) pairs
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: find lines and fit row by row in Python
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: "H" forces the prompt on the first row,
                # "Y" (set by answering 'a') accepts all remaining rows
                action = "H"
                for r in rows:
                    # edges are stored as flat (left, right) pairs per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels reported by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            # reject all remaining rows and stop
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3615
3616 def _init_blinfo(self):
3617 """\
3618 Initialise the following three auxiliary members:
3619 blpars : parameters of the best-fit baseline,
3620 masklists : mask data (edge positions of masked channels) and
3621 actualmask : mask data (in boolean list),
3622 to keep for use later (including output to logger/text files).
3623 Used by poly_baseline() and auto_poly_baseline() in case of
3624 'plot=True'.
3625 """
3626 self.blpars = []
3627 self.masklists = []
3628 self.actualmask = []
3629 return
3630
3631 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3632 """\
3633 Append baseline-fitting related info to blpars, masklist and
3634 actualmask.
3635 """
3636 self.blpars.append(data_blpars)
3637 self.masklists.append(data_masklists)
3638 self.actualmask.append(data_actualmask)
3639 return
3640
3641 @asaplog_post_dec
3642 def rotate_linpolphase(self, angle):
3643 """\
3644 Rotate the phase of the complex polarization O=Q+iU correlation.
3645 This is always done in situ in the raw data. So if you call this
3646 function more than once then each call rotates the phase further.
3647
3648 Parameters:
3649
3650 angle: The angle (degrees) to rotate (add) by.
3651
3652 Example::
3653
3654 scan.rotate_linpolphase(2.3)
3655
3656 """
3657 varlist = vars()
3658 self._math._rotate_linpolphase(self, angle)
3659 self._add_history("rotate_linpolphase", varlist)
3660 return
3661
3662 @asaplog_post_dec
3663 def rotate_xyphase(self, angle):
3664 """\
3665 Rotate the phase of the XY correlation. This is always done in situ
3666 in the data. So if you call this function more than once
3667 then each call rotates the phase further.
3668
3669 Parameters:
3670
3671 angle: The angle (degrees) to rotate (add) by.
3672
3673 Example::
3674
3675 scan.rotate_xyphase(2.3)
3676
3677 """
3678 varlist = vars()
3679 self._math._rotate_xyphase(self, angle)
3680 self._add_history("rotate_xyphase", varlist)
3681 return
3682
3683 @asaplog_post_dec
3684 def swap_linears(self):
3685 """\
3686 Swap the linear polarisations XX and YY, or better the first two
3687 polarisations as this also works for ciculars.
3688 """
3689 varlist = vars()
3690 self._math._swap_linears(self)
3691 self._add_history("swap_linears", varlist)
3692 return
3693
3694 @asaplog_post_dec
3695 def invert_phase(self):
3696 """\
3697 Invert the phase of the complex polarisation
3698 """
3699 varlist = vars()
3700 self._math._invert_phase(self)
3701 self._add_history("invert_phase", varlist)
3702 return
3703
3704 @asaplog_post_dec
3705 def add(self, offset, insitu=None):
3706 """\
3707 Return a scan where all spectra have the offset added
3708
3709 Parameters:
3710
3711 offset: the offset
3712
3713 insitu: if False a new scantable is returned.
3714 Otherwise, the scaling is done in-situ
3715 The default is taken from .asaprc (False)
3716
3717 """
3718 if insitu is None: insitu = rcParams['insitu']
3719 self._math._setinsitu(insitu)
3720 varlist = vars()
3721 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3722 s._add_history("add", varlist)
3723 if insitu:
3724 self._assign(s)
3725 else:
3726 return s
3727
3728 @asaplog_post_dec
3729 def scale(self, factor, tsys=True, insitu=None):
3730 """\
3731
3732 Return a scan where all spectra are scaled by the given 'factor'
3733
3734 Parameters:
3735
3736 factor: the scaling factor (float or 1D float list)
3737
3738 insitu: if False a new scantable is returned.
3739 Otherwise, the scaling is done in-situ
3740 The default is taken from .asaprc (False)
3741
3742 tsys: if True (default) then apply the operation to Tsys
3743 as well as the data
3744
3745 """
3746 if insitu is None: insitu = rcParams['insitu']
3747 self._math._setinsitu(insitu)
3748 varlist = vars()
3749 s = None
3750 import numpy
3751 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3752 if isinstance(factor[0], list) or isinstance(factor[0],
3753 numpy.ndarray):
3754 from asapmath import _array2dOp
3755 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3756 else:
3757 s = scantable( self._math._arrayop( self, factor,
3758 "MUL", tsys ) )
3759 else:
3760 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3761 s._add_history("scale", varlist)
3762 if insitu:
3763 self._assign(s)
3764 else:
3765 return s
3766
3767 @preserve_selection
3768 def set_sourcetype(self, match, matchtype="pattern",
3769 sourcetype="reference"):
3770 """\
3771 Set the type of the source to be an source or reference scan
3772 using the provided pattern.
3773
3774 Parameters:
3775
3776 match: a Unix style pattern, regular expression or selector
3777
3778 matchtype: 'pattern' (default) UNIX style pattern or
3779 'regex' regular expression
3780
3781 sourcetype: the type of the source to use (source/reference)
3782
3783 """
3784 varlist = vars()
3785 stype = -1
3786 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
3787 stype = 1
3788 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
3789 stype = 0
3790 else:
3791 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
3792 if matchtype.lower().startswith("p"):
3793 matchtype = "pattern"
3794 elif matchtype.lower().startswith("r"):
3795 matchtype = "regex"
3796 else:
3797 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3798 sel = selector()
3799 if isinstance(match, selector):
3800 sel = match
3801 else:
3802 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3803 self.set_selection(sel)
3804 self._setsourcetype(stype)
3805 self._add_history("set_sourcetype", varlist)
3806
3807 @asaplog_post_dec
3808 @preserve_selection
3809 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3810 """\
3811 This function allows to build quotients automatically.
3812 It assumes the observation to have the same number of
3813 "ons" and "offs"
3814
3815 Parameters:
3816
3817 preserve: you can preserve (default) the continuum or
3818 remove it. The equations used are
3819
3820 preserve: Output = Toff * (on/off) - Toff
3821
3822 remove: Output = Toff * (on/off) - Ton
3823
3824 mode: the on/off detection mode
3825 'paired' (default)
3826 identifies 'off' scans by the
3827 trailing '_R' (Mopra/Parkes) or
3828 '_e'/'_w' (Tid) and matches
3829 on/off pairs from the observing pattern
3830 'time'
3831 finds the closest off in time
3832
3833 .. todo:: verify argument is not implemented
3834
3835 """
3836 varlist = vars()
3837 modes = ["time", "paired"]
3838 if not mode in modes:
3839 msg = "please provide valid mode. Valid modes are %s" % (modes)
3840 raise ValueError(msg)
3841 s = None
3842 if mode.lower() == "paired":
3843 sel = self.get_selection()
3844 sel.set_query("SRCTYPE==psoff")
3845 self.set_selection(sel)
3846 offs = self.copy()
3847 sel.set_query("SRCTYPE==pson")
3848 self.set_selection(sel)
3849 ons = self.copy()
3850 s = scantable(self._math._quotient(ons, offs, preserve))
3851 elif mode.lower() == "time":
3852 s = scantable(self._math._auto_quotient(self, mode, preserve))
3853 s._add_history("auto_quotient", varlist)
3854 return s
3855
3856 @asaplog_post_dec
3857 def mx_quotient(self, mask = None, weight='median', preserve=True):
3858 """\
3859 Form a quotient using "off" beams when observing in "MX" mode.
3860
3861 Parameters:
3862
3863 mask: an optional mask to be used when weight == 'stddev'
3864
3865 weight: How to average the off beams. Default is 'median'.
3866
3867 preserve: you can preserve (default) the continuum or
3868 remove it. The equations used are:
3869
3870 preserve: Output = Toff * (on/off) - Toff
3871
3872 remove: Output = Toff * (on/off) - Ton
3873
3874 """
3875 mask = mask or ()
3876 varlist = vars()
3877 on = scantable(self._math._mx_extract(self, 'on'))
3878 preoff = scantable(self._math._mx_extract(self, 'off'))
3879 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3880 from asapmath import quotient
3881 q = quotient(on, off, preserve)
3882 q._add_history("mx_quotient", varlist)
3883 return q
3884
3885 @asaplog_post_dec
3886 def freq_switch(self, insitu=None):
3887 """\
3888 Apply frequency switching to the data.
3889
3890 Parameters:
3891
3892 insitu: if False a new scantable is returned.
3893 Otherwise, the swictching is done in-situ
3894 The default is taken from .asaprc (False)
3895
3896 """
3897 if insitu is None: insitu = rcParams['insitu']
3898 self._math._setinsitu(insitu)
3899 varlist = vars()
3900 s = scantable(self._math._freqswitch(self))
3901 s._add_history("freq_switch", varlist)
3902 if insitu:
3903 self._assign(s)
3904 else:
3905 return s
3906
3907 @asaplog_post_dec
3908 def recalc_azel(self):
3909 """Recalculate the azimuth and elevation for each position."""
3910 varlist = vars()
3911 self._recalcazel()
3912 self._add_history("recalc_azel", varlist)
3913 return
3914
3915 @asaplog_post_dec
3916 def __add__(self, other):
3917 """
3918 implicit on all axes and on Tsys
3919 """
3920 varlist = vars()
3921 s = self.__op( other, "ADD" )
3922 s._add_history("operator +", varlist)
3923 return s
3924
3925 @asaplog_post_dec
3926 def __sub__(self, other):
3927 """
3928 implicit on all axes and on Tsys
3929 """
3930 varlist = vars()
3931 s = self.__op( other, "SUB" )
3932 s._add_history("operator -", varlist)
3933 return s
3934
3935 @asaplog_post_dec
3936 def __mul__(self, other):
3937 """
3938 implicit on all axes and on Tsys
3939 """
3940 varlist = vars()
3941 s = self.__op( other, "MUL" ) ;
3942 s._add_history("operator *", varlist)
3943 return s
3944
3945
3946 @asaplog_post_dec
3947 def __div__(self, other):
3948 """
3949 implicit on all axes and on Tsys
3950 """
3951 varlist = vars()
3952 s = self.__op( other, "DIV" )
3953 s._add_history("operator /", varlist)
3954 return s
3955
3956 @asaplog_post_dec
3957 def __op( self, other, mode ):
3958 s = None
3959 if isinstance(other, scantable):
3960 s = scantable(self._math._binaryop(self, other, mode))
3961 elif isinstance(other, float):
3962 if other == 0.0:
3963 raise ZeroDivisionError("Dividing by zero is not recommended")
3964 s = scantable(self._math._unaryop(self, other, mode, False))
3965 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3966 if isinstance(other[0], list) \
3967 or isinstance(other[0], numpy.ndarray):
3968 from asapmath import _array2dOp
3969 s = _array2dOp( self, other, mode, False )
3970 else:
3971 s = scantable( self._math._arrayop( self, other,
3972 mode, False ) )
3973 else:
3974 raise TypeError("Other input is not a scantable or float value")
3975 return s
3976
3977 @asaplog_post_dec
3978 def get_fit(self, row=0):
3979 """\
3980 Print or return the stored fits for a row in the scantable
3981
3982 Parameters:
3983
3984 row: the row which the fit has been applied to.
3985
3986 """
3987 if row > self.nrow():
3988 return
3989 from asap.asapfit import asapfit
3990 fit = asapfit(self._getfit(row))
3991 asaplog.push( '%s' %(fit) )
3992 return fit.as_dict()
3993
3994 @preserve_selection
3995 def flag_nans(self):
3996 """\
3997 Utility function to flag NaN values in the scantable.
3998 """
3999 import numpy
4000 basesel = self.get_selection()
4001 for i in range(self.nrow()):
4002 sel = self.get_row_selector(i)
4003 self.set_selection(basesel+sel)
4004 nans = numpy.isnan(self._getspectrum(0))
4005 if numpy.any(nans):
4006 bnans = [ bool(v) for v in nans]
4007 self.flag(bnans)
4008
    def get_row_selector(self, rowno):
        # Build a selector restricted to the single (0-based) row 'rowno';
        # used e.g. by flag_nans() to visit the table one row at a time.
        return selector(rows=[rowno])
4011
    def _add_history(self, funcname, parameters):
        # Store a history entry of the form
        #   "<timestamp>##<funcname>##key=value##key2=value2..."
        # in the scantable. 'parameters' is typically the caller's vars();
        # NB: 'self' is deleted from it, so the caller's dict is mutated.
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            # dict-valued parameters are expanded one level deep
            if type(v) is dict:
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # avoid dumping a whole table into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            # compress boolean masks to [start, end) segments
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4052
4053
4054 def _zip_mask(self, mask):
4055 mask = list(mask)
4056 i = 0
4057 segments = []
4058 while mask[i:].count(1):
4059 i += mask[i:].index(1)
4060 if mask[i:].count(0):
4061 j = i + mask[i:].index(0)
4062 else:
4063 j = len(mask)
4064 segments.append([i, j])
4065 i = j
4066 return segments
4067
4068 def _get_ordinate_label(self):
4069 fu = "("+self.get_fluxunit()+")"
4070 import re
4071 lbl = "Intensity"
4072 if re.match(".K.", fu):
4073 lbl = "Brightness Temperature "+ fu
4074 elif re.match(".Jy.", fu):
4075 lbl = "Flux density "+ fu
4076 return lbl
4077
4078 def _check_ifs(self):
4079# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4080 nchans = [self.nchan(i) for i in self.getifnos()]
4081 nchans = filter(lambda t: t > 0, nchans)
4082 return (sum(nchans)/len(nchans) == nchans[0])
4083
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable.

        Parameters:
            names:    list of input file/directory names; environment
                      variables and '~' are expanded, missing paths raise
                      IOError
            unit:     flux unit to set on the result, or None to leave as-is
            average:  if True, time-average the integrations of each scan
            opts:     extra options passed through to the filler's open()
                      (NOTE(review): mutable default is never mutated here,
                      so it is safe)
        """
        first = True
        fullnames = []
        # validate and expand every path before reading anything
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage backend: 1 for disk-based tables, 0 for memory
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # measurement sets get the dedicated MS filler
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
4124
    def _get_verify_action( self, msg, action=None ):
        """Normalise (or interactively ask for) a verification action.

        Parameters:

            msg:    prompt text shown to the user when no usable
                    action is supplied
            action: optional pre-selected action string; when None or
                    not a string, the user is prompted on stdin

        Returns one of 'Y', 'N', 'A' or 'R'. An empty reply and any
        unrecognised input both fall back to 'Y'; 'H' or '?' prints a
        help text and prompts again.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare return defaults to 'yes'
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt after help; the original 'action' argument is
            # deliberately dropped so the user is asked again
            return self._get_verify_action(msg)
        else:
            # unrecognised input is treated as 'yes'
            return 'Y'
4143
4144 def __getitem__(self, key):
4145 if key < 0:
4146 key += self.nrow()
4147 if key >= self.nrow():
4148 raise IndexError("Row index out of range.")
4149 return self._getspectrum(key)
4150
4151 def __setitem__(self, key, value):
4152 if key < 0:
4153 key += self.nrow()
4154 if key >= self.nrow():
4155 raise IndexError("Row index out of range.")
4156 if not hasattr(value, "__len__") or \
4157 len(value) > self.nchan(self.getif(key)):
4158 raise ValueError("Spectrum length doesn't match.")
4159 return self._setspectrum(value, key)
4160
4161 def __len__(self):
4162 return self.nrow()
4163
4164 def __iter__(self):
4165 for i in range(len(self)):
4166 yield self[i]
Note: See TracBrowser for help on using the repository browser.