source: trunk/python/scantable.py@ 2408

Last change on this file since 2408 was 2406, checked in by Takeshi Nakazato, 13 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes/No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Added Python interface (scantable.get_tsysspectrum) for the method
to get channel dependent Tsys, which is just calling
Scantable._gettsysspectrum.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 137.0 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
22
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable may change the selection freely; whatever
    selection was active before the call is re-applied afterwards,
    even when *func* raises.
    """
    @wraps_dec(func)
    def wrap(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            obj.set_selection(saved)
        return result
    return wrap
33
def is_scantable(filename):
    """Is the given file a scantable?

    A scantable is stored as a casacore table directory; any such table
    whose table.info header does NOT identify it as a Measurement Set is
    treated as a scantable.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    header = f.readline()
    f.close()
    # accept any casacore table that is not a Measurement Set
    return header.find('Measurement Set') == -1
55## return (os.path.isdir(filename)
56## and not os.path.exists(filename+'/table.f1')
57## and os.path.exists(filename+'/table.info'))
58
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Checks that *filename* is a casacore table directory and that its
    table.info header line mentions 'Measurement Set'.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    header = f.readline()
    f.close()
    return header.find('Measurement Set') != -1
79
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input
                  'edge' parameter can be given in a simpler shape in the
                  following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be
                          replaced to single integers.
                          e.g. [[15,15]] can be simplified to [15]
                          (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to
                          [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to
                          [5,[10,10]] or [[5,5],10], but can NOT be
                          simplified to [5,10]. when [5,10] given, it is
                          interpreted as [[5,10],[5,10],[5,10],...] instead,
                          as shown before.

    Raises:
        ValueError: if *edge* (or one of its elements) is neither an
                    integer nor a pair of integers.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation ----------------------------------------------------
    # Fixed: use the exception-instantiation form of raise (works on both
    # Python 2 and 3) instead of the deprecated 'raise E, msg' statement,
    # and build the messages without backslash continuations that used to
    # embed long runs of indentation whitespace into the error text.
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    elif not _is_valid(edge, int):
        raise ValueError("Parameter 'edge' has to be an integer or a "
                         "pair of integers specified as a tuple. "
                         "Nested tuples are allowed "
                         "to make individual selection for different IFs.")

    # --- canonicalise the trivial shapes -------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # a 'common' edge (one pair applied to all IFs) is a flat pair;
    # anything longer, or containing a nested sequence, is per-IF
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten per-IF values, expanding single integers to pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
164
def raise_fitting_failure_exception(e):
    """Report a failed fit.

    In verbose mode the original error and a hint are pushed to the asap
    log; otherwise a RuntimeError combining both is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
172
def pack_progress_params(showprogress, minnrow):
    """Serialise progress options as '<bool>,<minnrow>' for the C++ layer."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
175
176class scantable(Scantable):
177 """\
178 The ASAP container for scans (single-dish data).
179 """
180
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        # resolve defaults from the rc file before recording history
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing scantable on disk: open directly
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: pass MS-specific filler options through
                    opts = {'ms': {}}
                    mskeys = ['getpt', 'antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file (rpfits/sdfits): run it through the filler
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list of input files: fill them all into one scantable
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
257
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-file default format when none is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        # refuse to clobber an existing file/directory unless told to
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # the MS writer takes the overwrite flag itself
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # all remaining formats go through the generic scantable writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
316
317 def copy(self):
318 """Return a copy of this scantable.
319
320 *Note*:
321
322 This makes a full (deep) copy. scan2 = scan1 makes a reference.
323
324 Example::
325
326 copiedscan = scan.copy()
327
328 """
329 sd = scantable(Scantable._copy(self))
330 return sd
331
332 def drop_scan(self, scanid=None):
333 """\
334 Return a new scantable where the specified scan number(s) has(have)
335 been dropped.
336
337 Parameters:
338
339 scanid: a (list of) scan number(s)
340
341 """
342 from asap import _is_sequence_or_number as _is_valid
343 from asap import _to_list
344 from asap import unique
345 if not _is_valid(scanid):
346 raise RuntimeError( 'Please specify a scanno to drop from the'
347 ' scantable' )
348 scanid = _to_list(scanid)
349 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
350 for sid in scanid: allscans.remove(sid)
351 if len(allscans) == 0:
352 raise ValueError("Can't remove all scans")
353 sel = selector(scans=allscans)
354 return self._select_copy(sel)
355
356 def _select_copy(self, selection):
357 orig = self.get_selection()
358 self.set_selection(orig+selection)
359 cp = self.copy()
360 self.set_selection(orig)
361 return cp
362
363 def get_scan(self, scanid=None):
364 """\
365 Return a specific scan (by scanno) or collection of scans (by
366 source name) in a new scantable.
367
368 *Note*:
369
370 See scantable.drop_scan() for the inverse operation.
371
372 Parameters:
373
374 scanid: a (list of) scanno or a source name, unix-style
375 patterns are accepted for source name matching, e.g.
376 '*_R' gets all 'ref scans
377
378 Example::
379
380 # get all scans containing the source '323p459'
381 newscan = scan.get_scan('323p459')
382 # get all 'off' scans
383 refscans = scan.get_scan('*_R')
384 # get a susbset of scans by scanno (as listed in scan.summary())
385 newscan = scan.get_scan([0, 2, 7, 10])
386
387 """
388 if scanid is None:
389 raise RuntimeError( 'Please specify a scan no or name to '
390 'retrieve from the scantable' )
391 try:
392 bsel = self.get_selection()
393 sel = selector()
394 if type(scanid) is str:
395 sel.set_name(scanid)
396 return self._select_copy(sel)
397 elif type(scanid) is int:
398 sel.set_scans([scanid])
399 return self._select_copy(sel)
400 elif type(scanid) is list:
401 sel.set_scans(scanid)
402 return self._select_copy(sel)
403 else:
404 msg = "Illegal scanid type, use 'int' or 'list' if ints."
405 raise TypeError(msg)
406 except RuntimeError:
407 raise
408
409 def __str__(self):
410 tempFile = tempfile.NamedTemporaryFile()
411 Scantable._summary(self, tempFile.name)
412 tempFile.seek(0)
413 asaplog.clear()
414 return tempFile.file.read()
415
416 @asaplog_post_dec
417 def summary(self, filename=None):
418 """\
419 Print a summary of the contents of this scantable.
420
421 Parameters:
422
423 filename: the name of a file to write the putput to
424 Default - no file output
425
426 """
427# info = Scantable._summary(self)
428 if filename is not None:
429 if filename is "":
430 filename = 'scantable_summary.txt'
431 from os.path import expandvars, isdir
432 filename = expandvars(filename)
433# if not isdir(filename):
434# data = open(filename, 'w')
435# data.write(info)
436# data.close()
437# else:
438 if isdir(filename):
439 msg = "Illegal file name '%s'." % (filename)
440 raise IOError(msg)
441 else:
442 filename = ""
443 Scantable._summary(self, filename)
444# info = Scantable._summary(self, filename)
445# return page(info)
446
447 def get_spectrum(self, rowno):
448 """Return the spectrum for the current row in the scantable as a list.
449
450 Parameters:
451
452 rowno: the row number to retrieve the spectrum from
453
454 """
455 return self._getspectrum(rowno)
456
457 def get_mask(self, rowno):
458 """Return the mask for the current row in the scantable as a list.
459
460 Parameters:
461
462 rowno: the row number to retrieve the mask from
463
464 """
465 return self._getmask(rowno)
466
467 def set_spectrum(self, spec, rowno):
468 """Set the spectrum for the current row in the scantable.
469
470 Parameters:
471
472 spec: the new spectrum
473
474 rowno: the row number to set the spectrum for
475
476 """
477 assert(len(spec) == self.nchan(self.getif(rowno)))
478 return self._setspectrum(spec, rowno)
479
480 def get_coordinate(self, rowno):
481 """Return the (spectral) coordinate for a a given 'rowno'.
482
483 *Note*:
484
485 * This coordinate is only valid until a scantable method modifies
486 the frequency axis.
487 * This coordinate does contain the original frequency set-up
488 NOT the new frame. The conversions however are done using the user
489 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
490 use scantable.freq_align first. Without it there is no closure,
491 i.e.::
492
493 c = myscan.get_coordinate(0)
494 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
495
496 Parameters:
497
498 rowno: the row number for the spectral coordinate
499
500 """
501 return coordinate(Scantable.get_coordinate(self, rowno))
502
503 def get_selection(self):
504 """\
505 Get the selection object currently set on this scantable.
506
507 Example::
508
509 sel = scan.get_selection()
510 sel.set_ifs(0) # select IF 0
511 scan.set_selection(sel) # apply modified selection
512
513 """
514 return selector(self._getselection())
515
516 def set_selection(self, selection=None, **kw):
517 """\
518 Select a subset of the data. All following operations on this scantable
519 are only applied to thi selection.
520
521 Parameters:
522
523 selection: a selector object (default unset the selection), or
524 any combination of "pols", "ifs", "beams", "scans",
525 "cycles", "name", "query"
526
527 Examples::
528
529 sel = selector() # create a selection object
530 self.set_scans([0, 3]) # select SCANNO 0 and 3
531 scan.set_selection(sel) # set the selection
532 scan.summary() # will only print summary of scanno 0 an 3
533 scan.set_selection() # unset the selection
534 # or the equivalent
535 scan.set_selection(scans=[0,3])
536 scan.summary() # will only print summary of scanno 0 an 3
537 scan.set_selection() # unset the selection
538
539 """
540 if selection is None:
541 # reset
542 if len(kw) == 0:
543 selection = selector()
544 else:
545 # try keywords
546 for k in kw:
547 if k not in selector.fields:
548 raise KeyError("Invalid selection key '%s', "
549 "valid keys are %s" % (k,
550 selector.fields))
551 selection = selector(**kw)
552 self._setselection(selection)
553
554 def get_row(self, row=0, insitu=None):
555 """\
556 Select a row in the scantable.
557 Return a scantable with single row.
558
559 Parameters:
560
561 row: row no of integration, default is 0.
562 insitu: if False a new scantable is returned. Otherwise, the
563 scaling is done in-situ. The default is taken from .asaprc
564 (False)
565
566 """
567 if insitu is None:
568 insitu = rcParams['insitu']
569 if not insitu:
570 workscan = self.copy()
571 else:
572 workscan = self
573 # Select a row
574 sel = selector()
575 sel.set_rows([row])
576 workscan.set_selection(sel)
577 if not workscan.nrow() == 1:
578 msg = "Could not identify single row. %d rows selected." \
579 % (workscan.nrow())
580 raise RuntimeError(msg)
581 if insitu:
582 self._assign(workscan)
583 else:
584 return workscan
585
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Returns:

            a list of statistic values; for the '*_abc' variants the
            abcissa values at the extremum are returned instead

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' means: return the abcissa value at the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max (and their _abc variants) also need the extremum channel
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        label=stat
        out = ""
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable per-row report for the log
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # the abcissa values become the actual return value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            if len(rows) > 1:
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # NOTE(review): fixed, predictable per-user path in /tmp - consider
        # the tempfile module instead
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        # NOTE(review): statunit is only assigned inside the loop above; with
        # an empty row selection this line would raise NameError - confirm
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
693
694 def chan2data(self, rowno=0, chan=0):
695 """\
696 Returns channel/frequency/velocity and spectral value
697 at an arbitrary row and channel in the scantable.
698
699 Parameters:
700
701 rowno: a row number in the scantable. Default is the
702 first row, i.e. rowno=0
703
704 chan: a channel in the scantable. Default is the first
705 channel, i.e. pos=0
706
707 """
708 if isinstance(rowno, int) and isinstance(chan, int):
709 qx = {'unit': self.get_unit(),
710 'value': self._getabcissa(rowno)[chan]}
711 qy = {'unit': self.get_fluxunit(),
712 'value': self._getspectrum(rowno)[chan]}
713 return qx, qy
714
715 def stddev(self, mask=None):
716 """\
717 Determine the standard deviation of the current beam/if/pol
718 Takes a 'mask' as an optional parameter to specify which
719 channels should be excluded.
720
721 Parameters:
722
723 mask: an optional mask specifying where the standard
724 deviation should be determined.
725
726 Example::
727
728 scan.set_unit('channel')
729 msk = scan.create_mask([100, 200], [500, 600])
730 scan.stddev(mask=m)
731
732 """
733 return self.stats(stat='stddev', mask=mask);
734
735
736 def get_column_names(self):
737 """\
738 Return a list of column names, which can be used for selection.
739 """
740 return list(Scantable.get_column_names(self))
741
742 def get_tsys(self, row=-1):
743 """\
744 Return the System temperatures.
745
746 Parameters:
747
748 row: the rowno to get the information for. (default all rows)
749
750 Returns:
751
752 a list of Tsys values for the current selection
753
754 """
755 if row > -1:
756 return self._get_column(self._gettsys, row)
757 return self._row_callback(self._gettsys, "Tsys")
758
759 def get_tsysspectrum(self, row=-1):
760 """\
761 Return the channel dependent system temperatures.
762
763 Parameters:
764
765 row: the rowno to get the information for. (default all rows)
766
767 Returns:
768
769 a list of Tsys values for the current selection
770
771 """
772 return self._get_column( self._gettsysspectrum, row )
773
774 def get_weather(self, row=-1):
775 """\
776 Return the weather informations.
777
778 Parameters:
779
780 row: the rowno to get the information for. (default all rows)
781
782 Returns:
783
784 a dict or list of of dicts of values for the current selection
785
786 """
787
788 values = self._get_column(self._get_weather, row)
789 if row > -1:
790 return {'temperature': values[0],
791 'pressure': values[1], 'humidity' : values[2],
792 'windspeed' : values[3], 'windaz' : values[4]
793 }
794 else:
795 out = []
796 for r in values:
797
798 out.append({'temperature': r[0],
799 'pressure': r[1], 'humidity' : r[2],
800 'windspeed' : r[3], 'windaz' : r[4]
801 })
802 return out
803
804 def _row_callback(self, callback, label):
805 out = ""
806 outvec = []
807 sep = '-'*50
808 for i in range(self.nrow()):
809 tm = self._gettime(i)
810 src = self._getsourcename(i)
811 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
812 out += 'Time[%s]:\n' % (tm)
813 if self.nbeam(-1) > 1:
814 out += ' Beam[%d] ' % (self.getbeam(i))
815 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
816 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
817 outvec.append(callback(i))
818 out += '= %3.3f\n' % (outvec[i])
819 out += sep+'\n'
820
821 asaplog.push(sep)
822 asaplog.push(" %s" % (label))
823 asaplog.push(sep)
824 asaplog.push(out)
825 asaplog.post()
826 return outvec
827
828 def _get_column(self, callback, row=-1, *args):
829 """
830 """
831 if row == -1:
832 return [callback(i, *args) for i in range(self.nrow())]
833 else:
834 if 0 <= row < self.nrow():
835 return callback(row, *args)
836
837
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1: shorter integration
            # times need more sub-second digits
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the unused trailing fields (".%f", then "%S", then "%M")
            # from the parse format so it matches the truncated time strings
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
883
884
885 def get_inttime(self, row=-1):
886 """\
887 Get a list of integration times for the observations.
888 Return a time in seconds for each integration in the scantable.
889
890 Parameters:
891
892 row: row no of integration. Default -1 return all rows.
893
894 """
895 return self._get_column(self._getinttime, row)
896
897
898 def get_sourcename(self, row=-1):
899 """\
900 Get a list source names for the observations.
901 Return a string for each integration in the scantable.
902 Parameters:
903
904 row: row no of integration. Default -1 return all rows.
905
906 """
907 return self._get_column(self._getsourcename, row)
908
909 def get_elevation(self, row=-1):
910 """\
911 Get a list of elevations for the observations.
912 Return a float for each integration in the scantable.
913
914 Parameters:
915
916 row: row no of integration. Default -1 return all rows.
917
918 """
919 return self._get_column(self._getelevation, row)
920
921 def get_azimuth(self, row=-1):
922 """\
923 Get a list of azimuths for the observations.
924 Return a float for each integration in the scantable.
925
926 Parameters:
927 row: row no of integration. Default -1 return all rows.
928
929 """
930 return self._get_column(self._getazimuth, row)
931
932 def get_parangle(self, row=-1):
933 """\
934 Get a list of parallactic angles for the observations.
935 Return a float for each integration in the scantable.
936
937 Parameters:
938
939 row: row no of integration. Default -1 return all rows.
940
941 """
942 return self._get_column(self._getparangle, row)
943
944 def get_direction(self, row=-1):
945 """
946 Get a list of Positions on the sky (direction) for the observations.
947 Return a string for each integration in the scantable.
948
949 Parameters:
950
951 row: row no of integration. Default -1 return all rows
952
953 """
954 return self._get_column(self._getdirection, row)
955
956 def get_directionval(self, row=-1):
957 """\
958 Get a list of Positions on the sky (direction) for the observations.
959 Return a float for each integration in the scantable.
960
961 Parameters:
962
963 row: row no of integration. Default -1 return all rows
964
965 """
966 return self._get_column(self._getdirectionvec, row)
967
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit: optional unit, default is 'channel'. Use one of '*Hz',
                  'km/s', 'channel' or equivalent ''

        """
        varlist = vars()
        # 'pixel' and 'channel' are synonyms for the default (empty) unit
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        # slot 0 of the coordinate info holds the axis unit
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
986
    @asaplog_post_dec
    def set_instrument(self, instr):
        """\
        Set the instrument for subsequent processing.

        Parameters:

            instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                   'DSS-43' (Tid), 'CEDUNA', and 'HOBART'

        """
        self._setInstrument(instr)
        # NOTE(review): history key is misspelled ('set_instument') - kept
        # as-is since persisted histories may depend on it
        self._add_history("set_instument", vars())
1000
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype: 'linear' or 'circular'

        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
1013
    @asaplog_post_dec
    def set_doppler(self, doppler='RADIO'):
        """\
        Set the doppler for all following operations on this scantable.

        Parameters:

            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'

        """
        varlist = vars()
        inf = list(self._getcoordinfo())
        # slot 2 of the coordinate info holds the doppler convention
        inf[2] = doppler
        self._setcoordinfo(inf)
        self._add_history("set_doppler", vars())
1029
1030 @asaplog_post_dec
1031 def set_freqframe(self, frame=None):
1032 """\
1033 Set the frame type of the Spectral Axis.
1034
1035 Parameters:
1036
1037 frame: an optional frame type, default 'LSRK'. Valid frames are:
1038 'TOPO', 'LSRD', 'LSRK', 'BARY',
1039 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1040
1041 Example::
1042
1043 scan.set_freqframe('BARY')
1044
1045 """
1046 frame = frame or rcParams['scantable.freqframe']
1047 varlist = vars()
1048 # "REST" is not implemented in casacore
1049 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1050 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1052 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1053
1054 if frame in valid:
1055 inf = list(self._getcoordinfo())
1056 inf[1] = frame
1057 self._setcoordinfo(inf)
1058 self._add_history("set_freqframe", varlist)
1059 else:
1060 msg = "Please specify a valid freq type. Valid types are:\n", valid
1061 raise TypeError(msg)
1062
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        varlist = vars()
        # delegate validation and the actual change to the C++ layer
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
1081
1082 def get_unit(self):
1083 """\
1084 Get the default unit set in this scantable
1085
1086 Returns:
1087
1088 A unit string
1089
1090 """
1091 inf = self._getcoordinfo()
1092 unit = inf[0]
1093 if unit == '': unit = 'channel'
1094 return unit
1095
1096 @asaplog_post_dec
1097 def get_abcissa(self, rowno=0):
1098 """\
1099 Get the abcissa in the current coordinate setup for the currently
1100 selected Beam/IF/Pol
1101
1102 Parameters:
1103
1104 rowno: an optional row number in the scantable. Default is the
1105 first row, i.e. rowno=0
1106
1107 Returns:
1108
1109 The abcissa values and the format string (as a dictionary)
1110
1111 """
1112 abc = self._getabcissa(rowno)
1113 lbl = self._getabcissalabel(rowno)
1114 return abc, lbl
1115
1116 @asaplog_post_dec
1117 def flag(self, mask=None, unflag=False, row=-1):
1118 """\
1119 Flag the selected data using an optional channel mask.
1120
1121 Parameters:
1122
1123 mask: an optional channel mask, created with create_mask. Default
1124 (no mask) is all channels.
1125
1126 unflag: if True, unflag the data
1127
1128 row: an optional row number in the scantable.
1129 Default -1 flags all rows
1130
1131 """
1132 varlist = vars()
1133 mask = mask or []
1134 self._flag(row, mask, unflag)
1135 self._add_history("flag", varlist)
1136
1137 @asaplog_post_dec
1138 def flag_row(self, rows=None, unflag=False):
1139 """\
1140 Flag the selected data in row-based manner.
1141
1142 Parameters:
1143
1144 rows: list of row numbers to be flagged. Default is no row
1145 (must be explicitly specified to execute row-based
1146 flagging).
1147
1148 unflag: if True, unflag the data.
1149
1150 """
1151 varlist = vars()
1152 if rows is None:
1153 rows = []
1154 self._flag_row(rows, unflag)
1155 self._add_history("flag_row", varlist)
1156
1157 @asaplog_post_dec
1158 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1159 """\
1160 Flag the selected data outside a specified range (in channel-base)
1161
1162 Parameters:
1163
1164 uthres: upper threshold.
1165
1166 dthres: lower threshold
1167
1168 clipoutside: True for flagging data outside the range
1169 [dthres:uthres].
1170 False for flagging data inside the range.
1171
1172 unflag: if True, unflag the data.
1173
1174 """
1175 varlist = vars()
1176 self._clip(uthres, dthres, clipoutside, unflag)
1177 self._add_history("clip", varlist)
1178
1179 @asaplog_post_dec
1180 def lag_flag(self, start, end, unit="MHz", insitu=None):
1181 """\
1182 Flag the data in 'lag' space by providing a frequency to remove.
1183 Flagged data in the scantable get interpolated over the region.
1184 No taper is applied.
1185
1186 Parameters:
1187
1188 start: the start frequency (really a period within the
1189 bandwidth) or period to remove
1190
1191 end: the end frequency or period to remove
1192
1193 unit: the frequency unit (default "MHz") or "" for
1194 explicit lag channels
1195
1196 *Notes*:
1197
1198 It is recommended to flag edges of the band or strong
1199 signals beforehand.
1200
1201 """
1202 if insitu is None: insitu = rcParams['insitu']
1203 self._math._setinsitu(insitu)
1204 varlist = vars()
1205 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1206 if not (unit == "" or base.has_key(unit)):
1207 raise ValueError("%s is not a valid unit." % unit)
1208 if unit == "":
1209 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1210 else:
1211 s = scantable(self._math._lag_flag(self, start*base[unit],
1212 end*base[unit], "frequency"))
1213 s._add_history("lag_flag", varlist)
1214 if insitu:
1215 self._assign(s)
1216 else:
1217 return s
1218
    @asaplog_post_dec
    def fft(self, rowno=None, mask=None, getrealimag=False):
        """\
        Apply FFT to the spectra.
        Flagged data in the scantable get interpolated over the region.

        Parameters:

            rowno:          The row number(s) to be processed. int, list
                            and tuple are accepted. By default (None), FFT
                            is applied to the whole data.

            mask:           Auxiliary channel mask(s). Given as a boolean
                            list, it is applied to all specified rows.
                            A list of boolean lists can also be used to
                            apply different masks. In the latter case, the
                            length of 'mask' must be the same as that of
                            'rowno'. The default is None.

            getrealimag:    If True, returns the real and imaginary part
                            values of the complex results.
                            If False (the default), returns the amplitude
                            (absolute value) normalised with Ndata/2 and
                            phase (argument, in unit of radian).

        Returns:

            A list of dictionaries containing the results for each spectrum.
            Each dictionary contains two values, the real and the imaginary
            parts when getrealimag = True, or the amplitude(absolute value)
            and the phase(argument) when getrealimag = False. The key for
            these values are 'real' and 'imag', or 'ampl' and 'phase',
            respectively.
        """
        # --- normalise 'rowno' to a list of row indices ------------------
        if rowno is None:
            rowno = []
        if mask is None:
            mask = []
        if isinstance(rowno, int):
            rowno = [rowno]
        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
            raise TypeError("The row number(s) must be int, list or tuple.")

        # an empty list selects every row of the scantable
        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]

        # --- normalise 'mask' to a list of boolean lists -----------------
        if not (isinstance(mask, list) or isinstance(mask, tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")
        # an empty mask means all channels are unmasked
        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
        # a flat boolean list is promoted to a one-element list of masks
        if isinstance(mask[0], bool): mask = [mask]
        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
            raise TypeError("The mask must be a boolean list or a list of "
                            "boolean list.")

        # a single mask is shared by all rows, otherwise one mask per row
        usecommonmask = (len(mask) == 1)
        if not usecommonmask:
            if len(mask) != len(rowno):
                raise ValueError("When specifying masks for each spectrum, "
                                 "the numbers of them must be identical.")
        # NOTE(review): length check uses self.nchan() (default IF); assumes
        # all selected rows share that channel count — confirm for multi-IF
        for amask in mask:
            if len(amask) != self.nchan():
                raise ValueError("The spectra and the mask have different "
                                 "length.")

        res = []

        imask = 0
        for whichrow in rowno:
            # _fft returns the two components interleaved pairwise:
            # (real, imag) or (ampl, phase), depending on 'getrealimag'
            fspec = self._fft(whichrow, mask[imask], getrealimag)
            nspec = len(fspec)

            i = 0
            v1 = []
            v2 = []
            reselem = {"real":[],"imag":[]} if getrealimag \
                      else {"ampl":[],"phase":[]}

            # de-interleave the flat result into the two component lists
            while (i < nspec):
                v1.append(fspec[i])
                v2.append(fspec[i+1])
                i += 2

            if getrealimag:
                reselem["real"] += v1
                reselem["imag"] += v2
            else:
                reselem["ampl"] += v1
                reselem["phase"] += v2

            res.append(reselem)

            # only advance the mask index when per-row masks were supplied
            if not usecommonmask:
                imask += 1

        return res
1314
1315 @asaplog_post_dec
1316 def create_mask(self, *args, **kwargs):
1317 """\
1318 Compute and return a mask based on [min, max] windows.
1319 The specified windows are to be INCLUDED, when the mask is
1320 applied.
1321
1322 Parameters:
1323
1324 [min, max], [min2, max2], ...
1325 Pairs of start/end points (inclusive)specifying the regions
1326 to be masked
1327
1328 invert: optional argument. If specified as True,
1329 return an inverted mask, i.e. the regions
1330 specified are EXCLUDED
1331
1332 row: create the mask using the specified row for
1333 unit conversions, default is row=0
1334 only necessary if frequency varies over rows.
1335
1336 Examples::
1337
1338 scan.set_unit('channel')
1339 # a)
1340 msk = scan.create_mask([400, 500], [800, 900])
1341 # masks everything outside 400 and 500
1342 # and 800 and 900 in the unit 'channel'
1343
1344 # b)
1345 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1346 # masks the regions between 400 and 500
1347 # and 800 and 900 in the unit 'channel'
1348
1349 # c)
1350 #mask only channel 400
1351 msk = scan.create_mask([400])
1352
1353 """
1354 row = kwargs.get("row", 0)
1355 data = self._getabcissa(row)
1356 u = self._getcoordinfo()[0]
1357 if u == "":
1358 u = "channel"
1359 msg = "The current mask window unit is %s" % u
1360 i = self._check_ifs()
1361 if not i:
1362 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1363 asaplog.push(msg)
1364 n = len(data)
1365 msk = _n_bools(n, False)
1366 # test if args is a 'list' or a 'normal *args - UGLY!!!
1367
1368 ws = (isinstance(args[-1][-1], int)
1369 or isinstance(args[-1][-1], float)) and args or args[0]
1370 for window in ws:
1371 if len(window) == 1:
1372 window = [window[0], window[0]]
1373 if len(window) == 0 or len(window) > 2:
1374 raise ValueError("A window needs to be defined as "
1375 "[start(, end)]")
1376 if window[0] > window[1]:
1377 tmp = window[0]
1378 window[0] = window[1]
1379 window[1] = tmp
1380 for i in range(n):
1381 if data[i] >= window[0] and data[i] <= window[1]:
1382 msk[i] = True
1383 if kwargs.has_key('invert'):
1384 if kwargs.get('invert'):
1385 msk = mask_not(msk)
1386 return msk
1387
1388 def get_masklist(self, mask=None, row=0, silent=False):
1389 """\
1390 Compute and return a list of mask windows, [min, max].
1391
1392 Parameters:
1393
1394 mask: channel mask, created with create_mask.
1395
1396 row: calcutate the masklist using the specified row
1397 for unit conversions, default is row=0
1398 only necessary if frequency varies over rows.
1399
1400 Returns:
1401
1402 [min, max], [min2, max2], ...
1403 Pairs of start/end points (inclusive)specifying
1404 the masked regions
1405
1406 """
1407 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1408 raise TypeError("The mask should be list or tuple.")
1409 if len(mask) < 2:
1410 raise TypeError("The mask elements should be > 1")
1411 data = self._getabcissa(row)
1412 if len(data) != len(mask):
1413 msg = "Number of channels in scantable != number of mask elements"
1414 raise TypeError(msg)
1415 u = self._getcoordinfo()[0]
1416 if u == "":
1417 u = "channel"
1418 msg = "The current mask window unit is %s" % u
1419 i = self._check_ifs()
1420 if not i:
1421 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1422 if not silent:
1423 asaplog.push(msg)
1424 masklist = []
1425 ist, ien = None, None
1426 ist, ien=self.get_mask_indices(mask)
1427 if ist is not None and ien is not None:
1428 for i in xrange(len(ist)):
1429 range=[data[ist[i]],data[ien[i]]]
1430 range.sort()
1431 masklist.append([range[0],range[1]])
1432 return masklist
1433
1434 def get_mask_indices(self, mask=None):
1435 """\
1436 Compute and Return lists of mask start indices and mask end indices.
1437
1438 Parameters:
1439
1440 mask: channel mask, created with create_mask.
1441
1442 Returns:
1443
1444 List of mask start indices and that of mask end indices,
1445 i.e., [istart1,istart2,....], [iend1,iend2,....].
1446
1447 """
1448 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1449 raise TypeError("The mask should be list or tuple.")
1450 if len(mask) < 2:
1451 raise TypeError("The mask elements should be > 1")
1452 istart = []
1453 iend = []
1454 if mask[0]:
1455 istart.append(0)
1456 for i in range(len(mask)-1):
1457 if not mask[i] and mask[i+1]:
1458 istart.append(i+1)
1459 elif mask[i] and not mask[i+1]:
1460 iend.append(i)
1461 if mask[len(mask)-1]:
1462 iend.append(len(mask)-1)
1463 if len(istart) != len(iend):
1464 raise RuntimeError("Numbers of mask start != mask end.")
1465 for i in range(len(istart)):
1466 if istart[i] > iend[i]:
1467 raise RuntimeError("Mask start index > mask end index")
1468 break
1469 return istart,iend
1470
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        # NOTE(review): a non-string argument is only logged as an error; the
        # method then continues with the bad value — confirm intended
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Invalid mask expression")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression means "all IFs, all channels"
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                offset=1, minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            # keep only IFs that actually exist in this scantable
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # linear scan of the table to find a row of this IF
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # 'None' stands for the whole spectrum of this IF
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1604
1605 def _parse_selection(self, selstr, typestr='float', offset=0.,
1606 minval=None, maxval=None):
1607 """
1608 Parameters:
1609 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1610 typestr : The type of the values in returned list
1611 ('integer' or 'float')
1612 offset : The offset value to subtract from or add to
1613 the boundary value if the selection string
1614 includes '<' or '>'
1615 minval, maxval : The minimum/maximum values to set if the
1616 selection string includes '<' or '>'.
1617 The list element is filled with None by default.
1618 Returns:
1619 A list of min/max pair of selections.
1620 Example:
1621 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1622 returns [[0,2],[5,7],[9,9]]
1623 """
1624 selgroups = selstr.split(';')
1625 sellists = []
1626 if typestr.lower().startswith('int'):
1627 formatfunc = int
1628 else:
1629 formatfunc = float
1630
1631 for currsel in selgroups:
1632 if currsel.find('~') > 0:
1633 minsel = formatfunc(currsel.split('~')[0].strip())
1634 maxsel = formatfunc(currsel.split('~')[1].strip())
1635 elif currsel.strip().startswith('<'):
1636 minsel = minval
1637 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1638 - formatfunc(offset)
1639 elif currsel.strip().startswith('>'):
1640 minsel = formatfunc(currsel.split('>')[1].strip()) \
1641 + formatfunc(offset)
1642 maxsel = maxval
1643 else:
1644 minsel = formatfunc(currsel)
1645 maxsel = formatfunc(currsel)
1646 sellists.append([minsel,maxsel])
1647 return sellists
1648
1649# def get_restfreqs(self):
1650# """
1651# Get the restfrequency(s) stored in this scantable.
1652# The return value(s) are always of unit 'Hz'
1653# Parameters:
1654# none
1655# Returns:
1656# a list of doubles
1657# """
1658# return list(self._getrestfreqs())
1659
1660 def get_restfreqs(self, ids=None):
1661 """\
1662 Get the restfrequency(s) stored in this scantable.
1663 The return value(s) are always of unit 'Hz'
1664
1665 Parameters:
1666
1667 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1668 be retrieved
1669
1670 Returns:
1671
1672 dictionary containing ids and a list of doubles for each id
1673
1674 """
1675 if ids is None:
1676 rfreqs = {}
1677 idlist = self.getmolnos()
1678 for i in idlist:
1679 rfreqs[i] = list(self._getrestfreqs(i))
1680 return rfreqs
1681 else:
1682 if type(ids) == list or type(ids) == tuple:
1683 rfreqs = {}
1684 for i in ids:
1685 rfreqs[i] = list(self._getrestfreqs(i))
1686 return rfreqs
1687 else:
1688 return list(self._getrestfreqs(ids))
1689
1690 @asaplog_post_dec
1691 def set_restfreqs(self, freqs=None, unit='Hz'):
1692 """\
1693 Set or replace the restfrequency specified and
1694 if the 'freqs' argument holds a scalar,
1695 then that rest frequency will be applied to all the selected
1696 data. If the 'freqs' argument holds
1697 a vector, then it MUST be of equal or smaller length than
1698 the number of IFs (and the available restfrequencies will be
1699 replaced by this vector). In this case, *all* data have
1700 the restfrequency set per IF according
1701 to the corresponding value you give in the 'freqs' vector.
1702 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1703 IF 1 gets restfreq 2e9.
1704
1705 You can also specify the frequencies via a linecatalog.
1706
1707 Parameters:
1708
1709 freqs: list of rest frequency values or string idenitfiers
1710
1711 unit: unit for rest frequency (default 'Hz')
1712
1713
1714 Example::
1715
1716 # set the given restfrequency for the all currently selected IFs
1717 scan.set_restfreqs(freqs=1.4e9)
1718 # set restfrequencies for the n IFs (n > 1) in the order of the
1719 # list, i.e
1720 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1721 # len(list_of_restfreqs) == nIF
1722 # for nIF == 1 the following will set multiple restfrequency for
1723 # that IF
1724 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1725 # set multiple restfrequencies per IF. as a list of lists where
1726 # the outer list has nIF elements, the inner s arbitrary
1727 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1728
1729 *Note*:
1730
1731 To do more sophisticate Restfrequency setting, e.g. on a
1732 source and IF basis, use scantable.set_selection() before using
1733 this function::
1734
1735 # provided your scantable is called scan
1736 selection = selector()
1737 selection.set_name("ORION*")
1738 selection.set_ifs([1])
1739 scan.set_selection(selection)
1740 scan.set_restfreqs(freqs=86.6e9)
1741
1742 """
1743 varlist = vars()
1744 from asap import linecatalog
1745 # simple value
1746 if isinstance(freqs, int) or isinstance(freqs, float):
1747 self._setrestfreqs([freqs], [""], unit)
1748 # list of values
1749 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1750 # list values are scalars
1751 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1752 if len(freqs) == 1:
1753 self._setrestfreqs(freqs, [""], unit)
1754 else:
1755 # allow the 'old' mode of setting mulitple IFs
1756 sel = selector()
1757 savesel = self._getselection()
1758 iflist = self.getifnos()
1759 if len(freqs)>len(iflist):
1760 raise ValueError("number of elements in list of list "
1761 "exeeds the current IF selections")
1762 iflist = self.getifnos()
1763 for i, fval in enumerate(freqs):
1764 sel.set_ifs(iflist[i])
1765 self._setselection(sel)
1766 self._setrestfreqs([fval], [""], unit)
1767 self._setselection(savesel)
1768
1769 # list values are dict, {'value'=, 'name'=)
1770 elif isinstance(freqs[-1], dict):
1771 values = []
1772 names = []
1773 for d in freqs:
1774 values.append(d["value"])
1775 names.append(d["name"])
1776 self._setrestfreqs(values, names, unit)
1777 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1778 sel = selector()
1779 savesel = self._getselection()
1780 iflist = self.getifnos()
1781 if len(freqs)>len(iflist):
1782 raise ValueError("number of elements in list of list exeeds"
1783 " the current IF selections")
1784 for i, fval in enumerate(freqs):
1785 sel.set_ifs(iflist[i])
1786 self._setselection(sel)
1787 self._setrestfreqs(fval, [""], unit)
1788 self._setselection(savesel)
1789 # freqs are to be taken from a linecatalog
1790 elif isinstance(freqs, linecatalog):
1791 sel = selector()
1792 savesel = self._getselection()
1793 for i in xrange(freqs.nrow()):
1794 sel.set_ifs(iflist[i])
1795 self._setselection(sel)
1796 self._setrestfreqs([freqs.get_frequency(i)],
1797 [freqs.get_name(i)], "MHz")
1798 # ensure that we are not iterating past nIF
1799 if i == self.nif()-1: break
1800 self._setselection(savesel)
1801 else:
1802 return
1803 self._add_history("set_restfreqs", varlist)
1804
1805 @asaplog_post_dec
1806 def shift_refpix(self, delta):
1807 """\
1808 Shift the reference pixel of the Spectra Coordinate by an
1809 integer amount.
1810
1811 Parameters:
1812
1813 delta: the amount to shift by
1814
1815 *Note*:
1816
1817 Be careful using this with broadband data.
1818
1819 """
1820 varlist = vars()
1821 Scantable.shift_refpix(self, delta)
1822 s._add_history("shift_refpix", varlist)
1823
1824 @asaplog_post_dec
1825 def history(self, filename=None):
1826 """\
1827 Print the history. Optionally to a file.
1828
1829 Parameters:
1830
1831 filename: The name of the file to save the history to.
1832
1833 """
1834 hist = list(self._gethistory())
1835 out = "-"*80
1836 for h in hist:
1837 if h.startswith("---"):
1838 out = "\n".join([out, h])
1839 else:
1840 items = h.split("##")
1841 date = items[0]
1842 func = items[1]
1843 items = items[2:]
1844 out += "\n"+date+"\n"
1845 out += "Function: %s\n Parameters:" % (func)
1846 for i in items:
1847 if i == '':
1848 continue
1849 s = i.split("=")
1850 out += "\n %s = %s" % (s[0], s[1])
1851 out = "\n".join([out, "-"*80])
1852 if filename is not None:
1853 if filename is "":
1854 filename = 'scantable_history.txt'
1855 import os
1856 filename = os.path.expandvars(os.path.expanduser(filename))
1857 if not os.path.isdir(filename):
1858 data = open(filename, 'w')
1859 data.write(out)
1860 data.close()
1861 else:
1862 msg = "Illegal file name '%s'." % (filename)
1863 raise IOError(msg)
1864 return page(out)
1865
1866 #
1867 # Maths business
1868 #
1869 @asaplog_post_dec
1870 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1871 """\
1872 Return the (time) weighted average of a scan. Scans will be averaged
1873 only if the source direction (RA/DEC) is within 1' otherwise
1874
1875 *Note*:
1876
1877 in channels only - align if necessary
1878
1879 Parameters:
1880
1881 mask: an optional mask (only used for 'var' and 'tsys'
1882 weighting)
1883
1884 scanav: True averages each scan separately
1885 False (default) averages all scans together,
1886
1887 weight: Weighting scheme.
1888 'none' (mean no weight)
1889 'var' (1/var(spec) weighted)
1890 'tsys' (1/Tsys**2 weighted)
1891 'tint' (integration time weighted)
1892 'tintsys' (Tint/Tsys**2)
1893 'median' ( median averaging)
1894 The default is 'tint'
1895
1896 align: align the spectra in velocity before averaging. It takes
1897 the time of the first spectrum as reference time.
1898
1899 Example::
1900
1901 # time average the scantable without using a mask
1902 newscan = scan.average_time()
1903
1904 """
1905 varlist = vars()
1906 weight = weight or 'TINT'
1907 mask = mask or ()
1908 scanav = (scanav and 'SCAN') or 'NONE'
1909 scan = (self, )
1910
1911 if align:
1912 scan = (self.freq_align(insitu=False), )
1913 s = None
1914 if weight.upper() == 'MEDIAN':
1915 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1916 scanav))
1917 else:
1918 s = scantable(self._math._average(scan, mask, weight.upper(),
1919 scanav))
1920 s._add_history("average_time", varlist)
1921 return s
1922
1923 @asaplog_post_dec
1924 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1925 """\
1926 Return a scan where all spectra are converted to either
1927 Jansky or Kelvin depending upon the flux units of the scan table.
1928 By default the function tries to look the values up internally.
1929 If it can't find them (or if you want to over-ride), you must
1930 specify EITHER jyperk OR eta (and D which it will try to look up
1931 also if you don't set it). jyperk takes precedence if you set both.
1932
1933 Parameters:
1934
1935 jyperk: the Jy / K conversion factor
1936
1937 eta: the aperture efficiency
1938
1939 d: the geometric diameter (metres)
1940
1941 insitu: if False a new scantable is returned.
1942 Otherwise, the scaling is done in-situ
1943 The default is taken from .asaprc (False)
1944
1945 """
1946 if insitu is None: insitu = rcParams['insitu']
1947 self._math._setinsitu(insitu)
1948 varlist = vars()
1949 jyperk = jyperk or -1.0
1950 d = d or -1.0
1951 eta = eta or -1.0
1952 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1953 s._add_history("convert_flux", varlist)
1954 if insitu: self._assign(s)
1955 else: return s
1956
1957 @asaplog_post_dec
1958 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1959 """\
1960 Return a scan after applying a gain-elevation correction.
1961 The correction can be made via either a polynomial or a
1962 table-based interpolation (and extrapolation if necessary).
1963 You specify polynomial coefficients, an ascii table or neither.
1964 If you specify neither, then a polynomial correction will be made
1965 with built in coefficients known for certain telescopes (an error
1966 will occur if the instrument is not known).
1967 The data and Tsys are *divided* by the scaling factors.
1968
1969 Parameters:
1970
1971 poly: Polynomial coefficients (default None) to compute a
1972 gain-elevation correction as a function of
1973 elevation (in degrees).
1974
1975 filename: The name of an ascii file holding correction factors.
1976 The first row of the ascii file must give the column
1977 names and these MUST include columns
1978 "ELEVATION" (degrees) and "FACTOR" (multiply data
1979 by this) somewhere.
1980 The second row must give the data type of the
1981 column. Use 'R' for Real and 'I' for Integer.
1982 An example file would be
1983 (actual factors are arbitrary) :
1984
1985 TIME ELEVATION FACTOR
1986 R R R
1987 0.1 0 0.8
1988 0.2 20 0.85
1989 0.3 40 0.9
1990 0.4 60 0.85
1991 0.5 80 0.8
1992 0.6 90 0.75
1993
1994 method: Interpolation method when correcting from a table.
1995 Values are "nearest", "linear" (default), "cubic"
1996 and "spline"
1997
1998 insitu: if False a new scantable is returned.
1999 Otherwise, the scaling is done in-situ
2000 The default is taken from .asaprc (False)
2001
2002 """
2003
2004 if insitu is None: insitu = rcParams['insitu']
2005 self._math._setinsitu(insitu)
2006 varlist = vars()
2007 poly = poly or ()
2008 from os.path import expandvars
2009 filename = expandvars(filename)
2010 s = scantable(self._math._gainel(self, poly, filename, method))
2011 s._add_history("gain_el", varlist)
2012 if insitu:
2013 self._assign(s)
2014 else:
2015 return s
2016
2017 @asaplog_post_dec
2018 def freq_align(self, reftime=None, method='cubic', insitu=None):
2019 """\
2020 Return a scan where all rows have been aligned in frequency/velocity.
2021 The alignment frequency frame (e.g. LSRK) is that set by function
2022 set_freqframe.
2023
2024 Parameters:
2025
2026 reftime: reference time to align at. By default, the time of
2027 the first row of data is used.
2028
2029 method: Interpolation method for regridding the spectra.
2030 Choose from "nearest", "linear", "cubic" (default)
2031 and "spline"
2032
2033 insitu: if False a new scantable is returned.
2034 Otherwise, the scaling is done in-situ
2035 The default is taken from .asaprc (False)
2036
2037 """
2038 if insitu is None: insitu = rcParams["insitu"]
2039 self._math._setinsitu(insitu)
2040 varlist = vars()
2041 reftime = reftime or ""
2042 s = scantable(self._math._freq_align(self, reftime, method))
2043 s._add_history("freq_align", varlist)
2044 if insitu:
2045 self._assign(s)
2046 else:
2047 return s
2048
2049 @asaplog_post_dec
2050 def opacity(self, tau=None, insitu=None):
2051 """\
2052 Apply an opacity correction. The data
2053 and Tsys are multiplied by the correction factor.
2054
2055 Parameters:
2056
2057 tau: (list of) opacity from which the correction factor is
2058 exp(tau*ZD)
2059 where ZD is the zenith-distance.
2060 If a list is provided, it has to be of length nIF,
2061 nIF*nPol or 1 and in order of IF/POL, e.g.
2062 [opif0pol0, opif0pol1, opif1pol0 ...]
2063 if tau is `None` the opacities are determined from a
2064 model.
2065
2066 insitu: if False a new scantable is returned.
2067 Otherwise, the scaling is done in-situ
2068 The default is taken from .asaprc (False)
2069
2070 """
2071 if insitu is None:
2072 insitu = rcParams['insitu']
2073 self._math._setinsitu(insitu)
2074 varlist = vars()
2075 if not hasattr(tau, "__len__"):
2076 tau = [tau]
2077 s = scantable(self._math._opacity(self, tau))
2078 s._add_history("opacity", varlist)
2079 if insitu:
2080 self._assign(s)
2081 else:
2082 return s
2083
2084 @asaplog_post_dec
2085 def bin(self, width=5, insitu=None):
2086 """\
2087 Return a scan where all spectra have been binned up.
2088
2089 Parameters:
2090
2091 width: The bin width (default=5) in pixels
2092
2093 insitu: if False a new scantable is returned.
2094 Otherwise, the scaling is done in-situ
2095 The default is taken from .asaprc (False)
2096
2097 """
2098 if insitu is None:
2099 insitu = rcParams['insitu']
2100 self._math._setinsitu(insitu)
2101 varlist = vars()
2102 s = scantable(self._math._bin(self, width))
2103 s._add_history("bin", varlist)
2104 if insitu:
2105 self._assign(s)
2106 else:
2107 return s
2108
2109 @asaplog_post_dec
2110 def resample(self, width=5, method='cubic', insitu=None):
2111 """\
2112 Return a scan where all spectra have been binned up.
2113
2114 Parameters:
2115
2116 width: The bin width (default=5) in pixels
2117
2118 method: Interpolation method when correcting from a table.
2119 Values are "nearest", "linear", "cubic" (default)
2120 and "spline"
2121
2122 insitu: if False a new scantable is returned.
2123 Otherwise, the scaling is done in-situ
2124 The default is taken from .asaprc (False)
2125
2126 """
2127 if insitu is None:
2128 insitu = rcParams['insitu']
2129 self._math._setinsitu(insitu)
2130 varlist = vars()
2131 s = scantable(self._math._resample(self, method, width))
2132 s._add_history("resample", varlist)
2133 if insitu:
2134 self._assign(s)
2135 else:
2136 return s
2137
2138 @asaplog_post_dec
2139 def average_pol(self, mask=None, weight='none'):
2140 """\
2141 Average the Polarisations together.
2142
2143 Parameters:
2144
2145 mask: An optional mask defining the region, where the
2146 averaging will be applied. The output will have all
2147 specified points masked.
2148
2149 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2150 weighted), or 'tsys' (1/Tsys**2 weighted)
2151
2152 """
2153 varlist = vars()
2154 mask = mask or ()
2155 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2156 s._add_history("average_pol", varlist)
2157 return s
2158
2159 @asaplog_post_dec
2160 def average_beam(self, mask=None, weight='none'):
2161 """\
2162 Average the Beams together.
2163
2164 Parameters:
2165 mask: An optional mask defining the region, where the
2166 averaging will be applied. The output will have all
2167 specified points masked.
2168
2169 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2170 weighted), or 'tsys' (1/Tsys**2 weighted)
2171
2172 """
2173 varlist = vars()
2174 mask = mask or ()
2175 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2176 s._add_history("average_beam", varlist)
2177 return s
2178
2179 def parallactify(self, pflag):
2180 """\
2181 Set a flag to indicate whether this data should be treated as having
2182 been 'parallactified' (total phase == 0.0)
2183
2184 Parameters:
2185
2186 pflag: Bool indicating whether to turn this on (True) or
2187 off (False)
2188
2189 """
2190 varlist = vars()
2191 self._parallactify(pflag)
2192 self._add_history("parallactify", varlist)
2193
2194 @asaplog_post_dec
2195 def convert_pol(self, poltype=None):
2196 """\
2197 Convert the data to a different polarisation type.
2198 Note that you will need cross-polarisation terms for most conversions.
2199
2200 Parameters:
2201
2202 poltype: The new polarisation type. Valid types are:
2203 "linear", "circular", "stokes" and "linpol"
2204
2205 """
2206 varlist = vars()
2207 s = scantable(self._math._convertpol(self, poltype))
2208 s._add_history("convert_pol", varlist)
2209 return s
2210
2211 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so each row can be compared against the
        # result and, if rejected, restored in the interactive loop below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            # per-row interactive review: overlay original (grey) and
            # smoothed (red) spectra and ask the user to accept or reject
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                theplot.clear()
                theplot.hold()
                theplot.set_axes('ylabel',ylab)
                theplot.set_axes('xlabel',s._getabcissalabel(r))
                theplot.set_axes('title',s._getsourcename(r))
                theplot.set_line(label='Original',color="#777777")
                theplot.plot(xorg,yorg)
                theplot.set_line(label='Smoothed',color="red")
                theplot.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    theplot.subplots[0]['lines'].append(
                        [theplot.subplots[0]['axes'].lines[i]]
                        )
                theplot.release()
                ### Ugly part for legend
                theplot.subplots[0]['lines']=[]
                # raw_input: this module targets Python 2
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2288
2289 @asaplog_post_dec
2290 def _parse_wn(self, wn):
2291 if isinstance(wn, list) or isinstance(wn, tuple):
2292 return wn
2293 elif isinstance(wn, int):
2294 return [ wn ]
2295 elif isinstance(wn, str):
2296 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2297 val = wn.split('-')
2298 val = [int(val[0]), int(val[1])]
2299 val.sort()
2300 res = [i for i in xrange(val[0], val[1]+1)]
2301 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2302 val = int(wn[2:])+1
2303 res = [i for i in xrange(val)]
2304 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2305 val = int(wn[:-2])+1
2306 res = [i for i in xrange(val)]
2307 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2308 val = int(wn[1:])
2309 res = [i for i in xrange(val)]
2310 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2311 val = int(wn[:-1])
2312 res = [i for i in xrange(val)]
2313 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2314 val = int(wn[2:])
2315 res = [i for i in xrange(val, self.nchan()/2+1)]
2316 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2317 val = int(wn[:-2])
2318 res = [i for i in xrange(val, self.nchan()/2+1)]
2319 elif wn[0] == '>': # case '>a' : return [a+1,a+2,...,a_nyq]
2320 val = int(wn[1:])+1
2321 res = [i for i in xrange(val, self.nchan()/2+1)]
2322 elif wn[-1] == '<': # case 'a<' : return [a+1,a+2,...,a_nyq]
2323 val = int(wn[:-1])+1
2324 res = [i for i in xrange(val, self.nchan()/2+1)]
2325
2326 return res
2327 else:
2328 msg = 'wrong value given for addwn/rejwn'
2329 raise RuntimeError(msg)
2330
2331
2332 @asaplog_post_dec
2333 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2334 fftmethod=None, fftthresh=None,
2335 addwn=None, rejwn=None, clipthresh=None,
2336 clipniter=None, plot=None,
2337 getresidual=None, showprogress=None,
2338 minnrow=None, outlog=None, blfile=None):
2339 """\
2340 Return a scan which has been baselined (all rows) with sinusoidal
2341 functions.
2342
2343 Parameters:
2344 insitu: if False a new scantable is returned.
2345 Otherwise, the scaling is done in-situ
2346 The default is taken from .asaprc (False)
2347 mask: an optional mask
2348 applyfft: if True use some method, such as FFT, to find
2349 strongest sinusoidal components in the wavenumber
2350 domain to be used for baseline fitting.
2351 default is True.
2352 fftmethod: method to find the strong sinusoidal components.
2353 now only 'fft' is available and it is the default.
2354 fftthresh: the threshold to select wave numbers to be used for
2355 fitting from the distribution of amplitudes in the
2356 wavenumber domain.
2357 both float and string values accepted.
2358 given a float value, the unit is set to sigma.
2359 for string values, allowed formats include:
2360 'xsigma' or 'x' (= x-sigma level. e.g.,
2361 '3sigma'), or
2362 'topx' (= the x strongest ones, e.g. 'top5').
2363 default is 3.0 (unit: sigma).
2364 addwn: the additional wave numbers to be used for fitting.
2365 list or integer value is accepted to specify every
2366 wave numbers. also string value can be used in case
2367 you need to specify wave numbers in a certain range,
2368 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2369 '<a' (= 0,1,...,a-2,a-1),
2370 '>=a' (= a, a+1, ... up to the maximum wave
2371 number corresponding to the Nyquist
2372 frequency for the case of FFT).
2373 default is [].
2374 rejwn: the wave numbers NOT to be used for fitting.
2375 can be set just as addwn but has higher priority:
2376 wave numbers which are specified both in addwn
2377 and rejwn will NOT be used. default is [].
2378 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2379 clipniter: maximum number of iteration of 'clipthresh'-sigma
2380 clipping (default is 0)
2381 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2382 plot the fit and the residual. In this each
2383 indivual fit has to be approved, by typing 'y'
2384 or 'n'
2385 getresidual: if False, returns best-fit values instead of
2386 residual. (default is True)
2387 showprogress: show progress status for large data.
2388 default is True.
2389 minnrow: minimum number of input spectra to show.
2390 default is 1000.
2391 outlog: Output the coefficients of the best-fit
2392 function to logger (default is False)
2393 blfile: Name of a text file in which the best-fit
2394 parameter values to be written
2395 (default is '': no file/logger output)
2396
2397 Example:
2398 # return a scan baselined by a combination of sinusoidal curves
2399 # having wave numbers in spectral window up to 10,
2400 # also with 3-sigma clipping, iteration up to 4 times
2401 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2402
2403 Note:
2404 The best-fit parameter values output in logger and/or blfile are now
2405 based on specunit of 'channel'.
2406 """
2407
2408 try:
2409 varlist = vars()
2410
2411 if insitu is None: insitu = rcParams['insitu']
2412 if insitu:
2413 workscan = self
2414 else:
2415 workscan = self.copy()
2416
2417 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2418 if applyfft is None: applyfft = True
2419 if fftmethod is None: fftmethod = 'fft'
2420 if fftthresh is None: fftthresh = 3.0
2421 if addwn is None: addwn = []
2422 if rejwn is None: rejwn = []
2423 if clipthresh is None: clipthresh = 3.0
2424 if clipniter is None: clipniter = 0
2425 if plot is None: plot = False
2426 if getresidual is None: getresidual = True
2427 if showprogress is None: showprogress = True
2428 if minnrow is None: minnrow = 1000
2429 if outlog is None: outlog = False
2430 if blfile is None: blfile = ''
2431
2432 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2433 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2434 str(fftthresh).lower(),
2435 workscan._parse_wn(addwn),
2436 workscan._parse_wn(rejwn), clipthresh,
2437 clipniter, getresidual,
2438 pack_progress_params(showprogress,
2439 minnrow), outlog,
2440 blfile)
2441 workscan._add_history('sinusoid_baseline', varlist)
2442
2443 if insitu:
2444 self._assign(workscan)
2445 else:
2446 return workscan
2447
2448 except RuntimeError, e:
2449 raise_fitting_failure_exception(e)
2450
2451
2452 @asaplog_post_dec
2453 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2454 fftmethod=None, fftthresh=None,
2455 addwn=None, rejwn=None, clipthresh=None,
2456 clipniter=None, edge=None, threshold=None,
2457 chan_avg_limit=None, plot=None,
2458 getresidual=None, showprogress=None,
2459 minnrow=None, outlog=None, blfile=None):
2460 """\
2461 Return a scan which has been baselined (all rows) with sinusoidal
2462 functions.
2463 Spectral lines are detected first using linefinder and masked out
2464 to avoid them affecting the baseline solution.
2465
2466 Parameters:
2467 insitu: if False a new scantable is returned.
2468 Otherwise, the scaling is done in-situ
2469 The default is taken from .asaprc (False)
2470 mask: an optional mask retreived from scantable
2471 applyfft: if True use some method, such as FFT, to find
2472 strongest sinusoidal components in the wavenumber
2473 domain to be used for baseline fitting.
2474 default is True.
2475 fftmethod: method to find the strong sinusoidal components.
2476 now only 'fft' is available and it is the default.
2477 fftthresh: the threshold to select wave numbers to be used for
2478 fitting from the distribution of amplitudes in the
2479 wavenumber domain.
2480 both float and string values accepted.
2481 given a float value, the unit is set to sigma.
2482 for string values, allowed formats include:
2483 'xsigma' or 'x' (= x-sigma level. e.g.,
2484 '3sigma'), or
2485 'topx' (= the x strongest ones, e.g. 'top5').
2486 default is 3.0 (unit: sigma).
2487 addwn: the additional wave numbers to be used for fitting.
2488 list or integer value is accepted to specify every
2489 wave numbers. also string value can be used in case
2490 you need to specify wave numbers in a certain range,
2491 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2492 '<a' (= 0,1,...,a-2,a-1),
2493 '>=a' (= a, a+1, ... up to the maximum wave
2494 number corresponding to the Nyquist
2495 frequency for the case of FFT).
2496 default is [].
2497 rejwn: the wave numbers NOT to be used for fitting.
2498 can be set just as addwn but has higher priority:
2499 wave numbers which are specified both in addwn
2500 and rejwn will NOT be used. default is [].
2501 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2502 clipniter: maximum number of iteration of 'clipthresh'-sigma
2503 clipping (default is 0)
2504 edge: an optional number of channel to drop at
2505 the edge of spectrum. If only one value is
2506 specified, the same number will be dropped
2507 from both sides of the spectrum. Default
2508 is to keep all channels. Nested tuples
2509 represent individual edge selection for
2510 different IFs (a number of spectral channels
2511 can be different)
2512 threshold: the threshold used by line finder. It is
2513 better to keep it large as only strong lines
2514 affect the baseline solution.
2515 chan_avg_limit: a maximum number of consequtive spectral
2516 channels to average during the search of
2517 weak and broad lines. The default is no
2518 averaging (and no search for weak lines).
2519 If such lines can affect the fitted baseline
2520 (e.g. a high order polynomial is fitted),
2521 increase this parameter (usually values up
2522 to 8 are reasonable). Most users of this
2523 method should find the default value sufficient.
2524 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2525 plot the fit and the residual. In this each
2526 indivual fit has to be approved, by typing 'y'
2527 or 'n'
2528 getresidual: if False, returns best-fit values instead of
2529 residual. (default is True)
2530 showprogress: show progress status for large data.
2531 default is True.
2532 minnrow: minimum number of input spectra to show.
2533 default is 1000.
2534 outlog: Output the coefficients of the best-fit
2535 function to logger (default is False)
2536 blfile: Name of a text file in which the best-fit
2537 parameter values to be written
2538 (default is "": no file/logger output)
2539
2540 Example:
2541 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2542
2543 Note:
2544 The best-fit parameter values output in logger and/or blfile are now
2545 based on specunit of 'channel'.
2546 """
2547
2548 try:
2549 varlist = vars()
2550
2551 if insitu is None: insitu = rcParams['insitu']
2552 if insitu:
2553 workscan = self
2554 else:
2555 workscan = self.copy()
2556
2557 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2558 if applyfft is None: applyfft = True
2559 if fftmethod is None: fftmethod = 'fft'
2560 if fftthresh is None: fftthresh = 3.0
2561 if addwn is None: addwn = []
2562 if rejwn is None: rejwn = []
2563 if clipthresh is None: clipthresh = 3.0
2564 if clipniter is None: clipniter = 0
2565 if edge is None: edge = (0,0)
2566 if threshold is None: threshold = 3
2567 if chan_avg_limit is None: chan_avg_limit = 1
2568 if plot is None: plot = False
2569 if getresidual is None: getresidual = True
2570 if showprogress is None: showprogress = True
2571 if minnrow is None: minnrow = 1000
2572 if outlog is None: outlog = False
2573 if blfile is None: blfile = ''
2574
2575 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2576 workscan._auto_sinusoid_baseline(mask, applyfft,
2577 fftmethod.lower(),
2578 str(fftthresh).lower(),
2579 workscan._parse_wn(addwn),
2580 workscan._parse_wn(rejwn),
2581 clipthresh, clipniter,
2582 normalise_edge_param(edge),
2583 threshold, chan_avg_limit,
2584 getresidual,
2585 pack_progress_params(showprogress,
2586 minnrow),
2587 outlog, blfile)
2588 workscan._add_history("auto_sinusoid_baseline", varlist)
2589
2590 if insitu:
2591 self._assign(workscan)
2592 else:
2593 return workscan
2594
2595 except RuntimeError, e:
2596 raise_fitting_failure_exception(e)
2597
2598 @asaplog_post_dec
2599 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2600 clipthresh=None, clipniter=None, plot=None,
2601 getresidual=None, showprogress=None, minnrow=None,
2602 outlog=None, blfile=None):
2603 """\
2604 Return a scan which has been baselined (all rows) by cubic spline
2605 function (piecewise cubic polynomial).
2606
2607 Parameters:
2608 insitu: If False a new scantable is returned.
2609 Otherwise, the scaling is done in-situ
2610 The default is taken from .asaprc (False)
2611 mask: An optional mask
2612 npiece: Number of pieces. (default is 2)
2613 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2614 clipniter: maximum number of iteration of 'clipthresh'-sigma
2615 clipping (default is 0)
2616 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2617 plot the fit and the residual. In this each
2618 indivual fit has to be approved, by typing 'y'
2619 or 'n'
2620 getresidual: if False, returns best-fit values instead of
2621 residual. (default is True)
2622 showprogress: show progress status for large data.
2623 default is True.
2624 minnrow: minimum number of input spectra to show.
2625 default is 1000.
2626 outlog: Output the coefficients of the best-fit
2627 function to logger (default is False)
2628 blfile: Name of a text file in which the best-fit
2629 parameter values to be written
2630 (default is "": no file/logger output)
2631
2632 Example:
2633 # return a scan baselined by a cubic spline consisting of 2 pieces
2634 # (i.e., 1 internal knot),
2635 # also with 3-sigma clipping, iteration up to 4 times
2636 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2637
2638 Note:
2639 The best-fit parameter values output in logger and/or blfile are now
2640 based on specunit of 'channel'.
2641 """
2642
2643 try:
2644 varlist = vars()
2645
2646 if insitu is None: insitu = rcParams['insitu']
2647 if insitu:
2648 workscan = self
2649 else:
2650 workscan = self.copy()
2651
2652 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2653 if npiece is None: npiece = 2
2654 if clipthresh is None: clipthresh = 3.0
2655 if clipniter is None: clipniter = 0
2656 if plot is None: plot = False
2657 if getresidual is None: getresidual = True
2658 if showprogress is None: showprogress = True
2659 if minnrow is None: minnrow = 1000
2660 if outlog is None: outlog = False
2661 if blfile is None: blfile = ''
2662
2663 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2664 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2665 getresidual,
2666 pack_progress_params(showprogress,
2667 minnrow), outlog,
2668 blfile)
2669 workscan._add_history("cspline_baseline", varlist)
2670
2671 if insitu:
2672 self._assign(workscan)
2673 else:
2674 return workscan
2675
2676 except RuntimeError, e:
2677 raise_fitting_failure_exception(e)
2678
2679 @asaplog_post_dec
2680 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2681 clipthresh=None, clipniter=None,
2682 edge=None, threshold=None, chan_avg_limit=None,
2683 getresidual=None, plot=None,
2684 showprogress=None, minnrow=None, outlog=None,
2685 blfile=None):
2686 """\
2687 Return a scan which has been baselined (all rows) by cubic spline
2688 function (piecewise cubic polynomial).
2689 Spectral lines are detected first using linefinder and masked out
2690 to avoid them affecting the baseline solution.
2691
2692 Parameters:
2693 insitu: if False a new scantable is returned.
2694 Otherwise, the scaling is done in-situ
2695 The default is taken from .asaprc (False)
2696 mask: an optional mask retreived from scantable
2697 npiece: Number of pieces. (default is 2)
2698 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2699 clipniter: maximum number of iteration of 'clipthresh'-sigma
2700 clipping (default is 0)
2701 edge: an optional number of channel to drop at
2702 the edge of spectrum. If only one value is
2703 specified, the same number will be dropped
2704 from both sides of the spectrum. Default
2705 is to keep all channels. Nested tuples
2706 represent individual edge selection for
2707 different IFs (a number of spectral channels
2708 can be different)
2709 threshold: the threshold used by line finder. It is
2710 better to keep it large as only strong lines
2711 affect the baseline solution.
2712 chan_avg_limit: a maximum number of consequtive spectral
2713 channels to average during the search of
2714 weak and broad lines. The default is no
2715 averaging (and no search for weak lines).
2716 If such lines can affect the fitted baseline
2717 (e.g. a high order polynomial is fitted),
2718 increase this parameter (usually values up
2719 to 8 are reasonable). Most users of this
2720 method should find the default value sufficient.
2721 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2722 plot the fit and the residual. In this each
2723 indivual fit has to be approved, by typing 'y'
2724 or 'n'
2725 getresidual: if False, returns best-fit values instead of
2726 residual. (default is True)
2727 showprogress: show progress status for large data.
2728 default is True.
2729 minnrow: minimum number of input spectra to show.
2730 default is 1000.
2731 outlog: Output the coefficients of the best-fit
2732 function to logger (default is False)
2733 blfile: Name of a text file in which the best-fit
2734 parameter values to be written
2735 (default is "": no file/logger output)
2736
2737 Example:
2738 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2739
2740 Note:
2741 The best-fit parameter values output in logger and/or blfile are now
2742 based on specunit of 'channel'.
2743 """
2744
2745 try:
2746 varlist = vars()
2747
2748 if insitu is None: insitu = rcParams['insitu']
2749 if insitu:
2750 workscan = self
2751 else:
2752 workscan = self.copy()
2753
2754 if mask is None: mask = [True for i in xrange(workscan.nchan())]
2755 if npiece is None: npiece = 2
2756 if clipthresh is None: clipthresh = 3.0
2757 if clipniter is None: clipniter = 0
2758 if edge is None: edge = (0, 0)
2759 if threshold is None: threshold = 3
2760 if chan_avg_limit is None: chan_avg_limit = 1
2761 if plot is None: plot = False
2762 if getresidual is None: getresidual = True
2763 if showprogress is None: showprogress = True
2764 if minnrow is None: minnrow = 1000
2765 if outlog is None: outlog = False
2766 if blfile is None: blfile = ''
2767
2768 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2769 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2770 clipniter,
2771 normalise_edge_param(edge),
2772 threshold,
2773 chan_avg_limit, getresidual,
2774 pack_progress_params(showprogress,
2775 minnrow),
2776 outlog, blfile)
2777 workscan._add_history("auto_cspline_baseline", varlist)
2778
2779 if insitu:
2780 self._assign(workscan)
2781 else:
2782 return workscan
2783
2784 except RuntimeError, e:
2785 raise_fitting_failure_exception(e)
2786
2787 @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            # vars() is taken before defaults are filled in, so the
            # history entry records the arguments as passed by the caller
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = [True for i in \
                                       xrange(workscan.nchan())]
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""

            if plot:
                # interactive path: fit row by row with the Python-level
                # fitter so the user can approve or reject each fit.
                # NOTE(review): best-fit parameters are appended to blfile
                # only if the file already exists — presumably intentional;
                # confirm against the blfile contract.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                       os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # combine the user mask with the row's own mask (CAS-1434)
                    f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # replace the spectrum by the residual (or by the fit
                    # itself when getresidual is False)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources held by the fitter
                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate low-level fitting failures into an asap-level report
            raise_fitting_failure_exception(e)
2902
2903 @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            # vars() is taken before defaults are filled in, so the
            # history entry records the arguments as passed by the caller
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''

            # flatten the (possibly nested, per-IF) edge spec into a flat
            # sequence of (left, right) pairs indexed by 2*IF below
            edge = normalise_edge_param(edge)

            if plot:
                # interactive path: run the line finder and the Python-level
                # fitter row by row so the user can approve or reject each fit.
                # NOTE(review): best-fit parameters are appended to blfile
                # only if the file already exists — presumably intentional;
                # confirm against the blfile contract.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                for r in rows:
                    # pick the (left, right) edge pair for this row's IF
                    idx = 2*workscan.getif(r)
                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
                                  edge[idx:idx+2]) # (CAS-1434)

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    f.plot(residual=True)
                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
                    if accept_fit.upper() == "N":
                        # rejected: leave this row's spectrum untouched
                        #workscan._append_blinfo(None, None, None)
                        continue

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # replace the spectrum by the residual (or by the fit
                    # itself when getresidual is False)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True)
                        blf.write(dataout)

                # release the plotter resources held by the fitter
                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate to the C++ implementation
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            # translate low-level fitting failures into an asap-level report
            raise_fitting_failure_exception(e)
3053
3054 def _init_blinfo(self):
3055 """\
3056 Initialise the following three auxiliary members:
3057 blpars : parameters of the best-fit baseline,
3058 masklists : mask data (edge positions of masked channels) and
3059 actualmask : mask data (in boolean list),
3060 to keep for use later (including output to logger/text files).
3061 Used by poly_baseline() and auto_poly_baseline() in case of
3062 'plot=True'.
3063 """
3064 self.blpars = []
3065 self.masklists = []
3066 self.actualmask = []
3067 return
3068
3069 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3070 """\
3071 Append baseline-fitting related info to blpars, masklist and
3072 actualmask.
3073 """
3074 self.blpars.append(data_blpars)
3075 self.masklists.append(data_masklists)
3076 self.actualmask.append(data_actualmask)
3077 return
3078
3079 @asaplog_post_dec
3080 def rotate_linpolphase(self, angle):
3081 """\
3082 Rotate the phase of the complex polarization O=Q+iU correlation.
3083 This is always done in situ in the raw data. So if you call this
3084 function more than once then each call rotates the phase further.
3085
3086 Parameters:
3087
3088 angle: The angle (degrees) to rotate (add) by.
3089
3090 Example::
3091
3092 scan.rotate_linpolphase(2.3)
3093
3094 """
3095 varlist = vars()
3096 self._math._rotate_linpolphase(self, angle)
3097 self._add_history("rotate_linpolphase", varlist)
3098 return
3099
3100 @asaplog_post_dec
3101 def rotate_xyphase(self, angle):
3102 """\
3103 Rotate the phase of the XY correlation. This is always done in situ
3104 in the data. So if you call this function more than once
3105 then each call rotates the phase further.
3106
3107 Parameters:
3108
3109 angle: The angle (degrees) to rotate (add) by.
3110
3111 Example::
3112
3113 scan.rotate_xyphase(2.3)
3114
3115 """
3116 varlist = vars()
3117 self._math._rotate_xyphase(self, angle)
3118 self._add_history("rotate_xyphase", varlist)
3119 return
3120
3121 @asaplog_post_dec
3122 def swap_linears(self):
3123 """\
3124 Swap the linear polarisations XX and YY, or better the first two
3125 polarisations as this also works for ciculars.
3126 """
3127 varlist = vars()
3128 self._math._swap_linears(self)
3129 self._add_history("swap_linears", varlist)
3130 return
3131
3132 @asaplog_post_dec
3133 def invert_phase(self):
3134 """\
3135 Invert the phase of the complex polarisation
3136 """
3137 varlist = vars()
3138 self._math._invert_phase(self)
3139 self._add_history("invert_phase", varlist)
3140 return
3141
3142 @asaplog_post_dec
3143 def add(self, offset, insitu=None):
3144 """\
3145 Return a scan where all spectra have the offset added
3146
3147 Parameters:
3148
3149 offset: the offset
3150
3151 insitu: if False a new scantable is returned.
3152 Otherwise, the scaling is done in-situ
3153 The default is taken from .asaprc (False)
3154
3155 """
3156 if insitu is None: insitu = rcParams['insitu']
3157 self._math._setinsitu(insitu)
3158 varlist = vars()
3159 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3160 s._add_history("add", varlist)
3161 if insitu:
3162 self._assign(s)
3163 else:
3164 return s
3165
3166 @asaplog_post_dec
3167 def scale(self, factor, tsys=True, insitu=None):
3168 """\
3169
3170 Return a scan where all spectra are scaled by the given 'factor'
3171
3172 Parameters:
3173
3174 factor: the scaling factor (float or 1D float list)
3175
3176 insitu: if False a new scantable is returned.
3177 Otherwise, the scaling is done in-situ
3178 The default is taken from .asaprc (False)
3179
3180 tsys: if True (default) then apply the operation to Tsys
3181 as well as the data
3182
3183 """
3184 if insitu is None: insitu = rcParams['insitu']
3185 self._math._setinsitu(insitu)
3186 varlist = vars()
3187 s = None
3188 import numpy
3189 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3190 if isinstance(factor[0], list) or isinstance(factor[0],
3191 numpy.ndarray):
3192 from asapmath import _array2dOp
3193 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3194 else:
3195 s = scantable( self._math._arrayop( self, factor,
3196 "MUL", tsys ) )
3197 else:
3198 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3199 s._add_history("scale", varlist)
3200 if insitu:
3201 self._assign(s)
3202 else:
3203 return s
3204
3205 @preserve_selection
3206 def set_sourcetype(self, match, matchtype="pattern",
3207 sourcetype="reference"):
3208 """\
3209 Set the type of the source to be an source or reference scan
3210 using the provided pattern.
3211
3212 Parameters:
3213
3214 match: a Unix style pattern, regular expression or selector
3215
3216 matchtype: 'pattern' (default) UNIX style pattern or
3217 'regex' regular expression
3218
3219 sourcetype: the type of the source to use (source/reference)
3220
3221 """
3222 varlist = vars()
3223 basesel = self.get_selection()
3224 stype = -1
3225 if sourcetype.lower().startswith("r"):
3226 stype = 1
3227 elif sourcetype.lower().startswith("s"):
3228 stype = 0
3229 else:
3230 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
3231 if matchtype.lower().startswith("p"):
3232 matchtype = "pattern"
3233 elif matchtype.lower().startswith("r"):
3234 matchtype = "regex"
3235 else:
3236 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3237 sel = selector()
3238 if isinstance(match, selector):
3239 sel = match
3240 else:
3241 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3242 self._setsourcetype(stype)
3243 self._add_history("set_sourcetype", varlist)
3244
3245 @asaplog_post_dec
3246 @preserve_selection
3247 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3248 """\
3249 This function allows to build quotients automatically.
3250 It assumes the observation to have the same number of
3251 "ons" and "offs"
3252
3253 Parameters:
3254
3255 preserve: you can preserve (default) the continuum or
3256 remove it. The equations used are
3257
3258 preserve: Output = Toff * (on/off) - Toff
3259
3260 remove: Output = Toff * (on/off) - Ton
3261
3262 mode: the on/off detection mode
3263 'paired' (default)
3264 identifies 'off' scans by the
3265 trailing '_R' (Mopra/Parkes) or
3266 '_e'/'_w' (Tid) and matches
3267 on/off pairs from the observing pattern
3268 'time'
3269 finds the closest off in time
3270
3271 .. todo:: verify argument is not implemented
3272
3273 """
3274 varlist = vars()
3275 modes = ["time", "paired"]
3276 if not mode in modes:
3277 msg = "please provide valid mode. Valid modes are %s" % (modes)
3278 raise ValueError(msg)
3279 s = None
3280 if mode.lower() == "paired":
3281 sel = self.get_selection()
3282 sel.set_query("SRCTYPE==psoff")
3283 self.set_selection(sel)
3284 offs = self.copy()
3285 sel.set_query("SRCTYPE==pson")
3286 self.set_selection(sel)
3287 ons = self.copy()
3288 s = scantable(self._math._quotient(ons, offs, preserve))
3289 elif mode.lower() == "time":
3290 s = scantable(self._math._auto_quotient(self, mode, preserve))
3291 s._add_history("auto_quotient", varlist)
3292 return s
3293
3294 @asaplog_post_dec
3295 def mx_quotient(self, mask = None, weight='median', preserve=True):
3296 """\
3297 Form a quotient using "off" beams when observing in "MX" mode.
3298
3299 Parameters:
3300
3301 mask: an optional mask to be used when weight == 'stddev'
3302
3303 weight: How to average the off beams. Default is 'median'.
3304
3305 preserve: you can preserve (default) the continuum or
3306 remove it. The equations used are:
3307
3308 preserve: Output = Toff * (on/off) - Toff
3309
3310 remove: Output = Toff * (on/off) - Ton
3311
3312 """
3313 mask = mask or ()
3314 varlist = vars()
3315 on = scantable(self._math._mx_extract(self, 'on'))
3316 preoff = scantable(self._math._mx_extract(self, 'off'))
3317 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3318 from asapmath import quotient
3319 q = quotient(on, off, preserve)
3320 q._add_history("mx_quotient", varlist)
3321 return q
3322
3323 @asaplog_post_dec
3324 def freq_switch(self, insitu=None):
3325 """\
3326 Apply frequency switching to the data.
3327
3328 Parameters:
3329
3330 insitu: if False a new scantable is returned.
3331 Otherwise, the swictching is done in-situ
3332 The default is taken from .asaprc (False)
3333
3334 """
3335 if insitu is None: insitu = rcParams['insitu']
3336 self._math._setinsitu(insitu)
3337 varlist = vars()
3338 s = scantable(self._math._freqswitch(self))
3339 s._add_history("freq_switch", varlist)
3340 if insitu:
3341 self._assign(s)
3342 else:
3343 return s
3344
3345 @asaplog_post_dec
3346 def recalc_azel(self):
3347 """Recalculate the azimuth and elevation for each position."""
3348 varlist = vars()
3349 self._recalcazel()
3350 self._add_history("recalc_azel", varlist)
3351 return
3352
3353 @asaplog_post_dec
3354 def __add__(self, other):
3355 varlist = vars()
3356 s = None
3357 if isinstance(other, scantable):
3358 s = scantable(self._math._binaryop(self, other, "ADD"))
3359 elif isinstance(other, float):
3360 s = scantable(self._math._unaryop(self, other, "ADD", False))
3361 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3362 if isinstance(other[0], list) \
3363 or isinstance(other[0], numpy.ndarray):
3364 from asapmath import _array2dOp
3365 s = _array2dOp( self.copy(), other, "ADD", False )
3366 else:
3367 s = scantable( self._math._arrayop( self.copy(), other,
3368 "ADD", False ) )
3369 else:
3370 raise TypeError("Other input is not a scantable or float value")
3371 s._add_history("operator +", varlist)
3372 return s
3373
3374 @asaplog_post_dec
3375 def __sub__(self, other):
3376 """
3377 implicit on all axes and on Tsys
3378 """
3379 varlist = vars()
3380 s = None
3381 if isinstance(other, scantable):
3382 s = scantable(self._math._binaryop(self, other, "SUB"))
3383 elif isinstance(other, float):
3384 s = scantable(self._math._unaryop(self, other, "SUB", False))
3385 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3386 if isinstance(other[0], list) \
3387 or isinstance(other[0], numpy.ndarray):
3388 from asapmath import _array2dOp
3389 s = _array2dOp( self.copy(), other, "SUB", False )
3390 else:
3391 s = scantable( self._math._arrayop( self.copy(), other,
3392 "SUB", False ) )
3393 else:
3394 raise TypeError("Other input is not a scantable or float value")
3395 s._add_history("operator -", varlist)
3396 return s
3397
3398 @asaplog_post_dec
3399 def __mul__(self, other):
3400 """
3401 implicit on all axes and on Tsys
3402 """
3403 varlist = vars()
3404 s = None
3405 if isinstance(other, scantable):
3406 s = scantable(self._math._binaryop(self, other, "MUL"))
3407 elif isinstance(other, float):
3408 s = scantable(self._math._unaryop(self, other, "MUL", False))
3409 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3410 if isinstance(other[0], list) \
3411 or isinstance(other[0], numpy.ndarray):
3412 from asapmath import _array2dOp
3413 s = _array2dOp( self.copy(), other, "MUL", False )
3414 else:
3415 s = scantable( self._math._arrayop( self.copy(), other,
3416 "MUL", False ) )
3417 else:
3418 raise TypeError("Other input is not a scantable or float value")
3419 s._add_history("operator *", varlist)
3420 return s
3421
3422
3423 @asaplog_post_dec
3424 def __div__(self, other):
3425 """
3426 implicit on all axes and on Tsys
3427 """
3428 varlist = vars()
3429 s = None
3430 if isinstance(other, scantable):
3431 s = scantable(self._math._binaryop(self, other, "DIV"))
3432 elif isinstance(other, float):
3433 if other == 0.0:
3434 raise ZeroDivisionError("Dividing by zero is not recommended")
3435 s = scantable(self._math._unaryop(self, other, "DIV", False))
3436 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3437 if isinstance(other[0], list) \
3438 or isinstance(other[0], numpy.ndarray):
3439 from asapmath import _array2dOp
3440 s = _array2dOp( self.copy(), other, "DIV", False )
3441 else:
3442 s = scantable( self._math._arrayop( self.copy(), other,
3443 "DIV", False ) )
3444 else:
3445 raise TypeError("Other input is not a scantable or float value")
3446 s._add_history("operator /", varlist)
3447 return s
3448
3449 @asaplog_post_dec
3450 def get_fit(self, row=0):
3451 """\
3452 Print or return the stored fits for a row in the scantable
3453
3454 Parameters:
3455
3456 row: the row which the fit has been applied to.
3457
3458 """
3459 if row > self.nrow():
3460 return
3461 from asap.asapfit import asapfit
3462 fit = asapfit(self._getfit(row))
3463 asaplog.push( '%s' %(fit) )
3464 return fit.as_dict()
3465
3466 @preserve_selection
3467 def flag_nans(self):
3468 """\
3469 Utility function to flag NaN values in the scantable.
3470 """
3471 import numpy
3472 basesel = self.get_selection()
3473 for i in range(self.nrow()):
3474 sel = self.get_row_selector(i)
3475 self.set_selection(basesel+sel)
3476 nans = numpy.isnan(self._getspectrum(0))
3477 if numpy.any(nans):
3478 bnans = [ bool(v) for v in nans]
3479 self.flag(bnans)
3480
3481 def get_row_selector(self, rowno):
3482 return selector(rows=[rowno])
3483
3484 def _add_history(self, funcname, parameters):
3485 if not rcParams['scantable.history']:
3486 return
3487 # create date
3488 sep = "##"
3489 from datetime import datetime
3490 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3491 hist = dstr+sep
3492 hist += funcname+sep#cdate+sep
3493 if parameters.has_key('self'):
3494 del parameters['self']
3495 for k, v in parameters.iteritems():
3496 if type(v) is dict:
3497 for k2, v2 in v.iteritems():
3498 hist += k2
3499 hist += "="
3500 if isinstance(v2, scantable):
3501 hist += 'scantable'
3502 elif k2 == 'mask':
3503 if isinstance(v2, list) or isinstance(v2, tuple):
3504 hist += str(self._zip_mask(v2))
3505 else:
3506 hist += str(v2)
3507 else:
3508 hist += str(v2)
3509 else:
3510 hist += k
3511 hist += "="
3512 if isinstance(v, scantable):
3513 hist += 'scantable'
3514 elif k == 'mask':
3515 if isinstance(v, list) or isinstance(v, tuple):
3516 hist += str(self._zip_mask(v))
3517 else:
3518 hist += str(v)
3519 else:
3520 hist += str(v)
3521 hist += sep
3522 hist = hist[:-2] # remove trailing '##'
3523 self._addhistory(hist)
3524
3525
3526 def _zip_mask(self, mask):
3527 mask = list(mask)
3528 i = 0
3529 segments = []
3530 while mask[i:].count(1):
3531 i += mask[i:].index(1)
3532 if mask[i:].count(0):
3533 j = i + mask[i:].index(0)
3534 else:
3535 j = len(mask)
3536 segments.append([i, j])
3537 i = j
3538 return segments
3539
3540 def _get_ordinate_label(self):
3541 fu = "("+self.get_fluxunit()+")"
3542 import re
3543 lbl = "Intensity"
3544 if re.match(".K.", fu):
3545 lbl = "Brightness Temperature "+ fu
3546 elif re.match(".Jy.", fu):
3547 lbl = "Flux density "+ fu
3548 return lbl
3549
3550 def _check_ifs(self):
3551# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
3552 nchans = [self.nchan(i) for i in self.getifnos()]
3553 nchans = filter(lambda t: t > 0, nchans)
3554 return (sum(nchans)/len(nchans) == nchans[0])
3555
3556 @asaplog_post_dec
3557 def _fill(self, names, unit, average, opts={}):
3558 first = True
3559 fullnames = []
3560 for name in names:
3561 name = os.path.expandvars(name)
3562 name = os.path.expanduser(name)
3563 if not os.path.exists(name):
3564 msg = "File '%s' does not exists" % (name)
3565 raise IOError(msg)
3566 fullnames.append(name)
3567 if average:
3568 asaplog.push('Auto averaging integrations')
3569 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3570 for name in fullnames:
3571 tbl = Scantable(stype)
3572 if is_ms( name ):
3573 r = msfiller( tbl )
3574 else:
3575 r = filler( tbl )
3576 rx = rcParams['scantable.reference']
3577 r.setreferenceexpr(rx)
3578 msg = "Importing %s..." % (name)
3579 asaplog.push(msg, False)
3580 r.open(name, opts)
3581 r.fill()
3582 if average:
3583 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3584 if not first:
3585 tbl = self._math._merge([self, tbl])
3586 Scantable.__init__(self, tbl)
3587 r.close()
3588 del r, tbl
3589 first = False
3590 #flush log
3591 asaplog.post()
3592 if unit is not None:
3593 self.set_fluxunit(unit)
3594 if not is_casapy():
3595 self.set_freqframe(rcParams['scantable.freqframe'])
3596
3597
3598 def __getitem__(self, key):
3599 if key < 0:
3600 key += self.nrow()
3601 if key >= self.nrow():
3602 raise IndexError("Row index out of range.")
3603 return self._getspectrum(key)
3604
3605 def __setitem__(self, key, value):
3606 if key < 0:
3607 key += self.nrow()
3608 if key >= self.nrow():
3609 raise IndexError("Row index out of range.")
3610 if not hasattr(value, "__len__") or \
3611 len(value) > self.nchan(self.getif(key)):
3612 raise ValueError("Spectrum length doesn't match.")
3613 return self._setspectrum(value, key)
3614
3615 def __len__(self):
3616 return self.nrow()
3617
3618 def __iter__(self):
3619 for i in range(len(self)):
3620 yield self[i]
Note: See TracBrowser for help on using the repository browser.