source: trunk/python/scantable.py@ 2751

Last change on this file since 2751 was 2751, checked in by Takeshi Nakazato, 12 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Update is_scantable.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 162.3 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import re
5import tempfile
6import numpy
7try:
8 from functools import wraps as wraps_dec
9except ImportError:
10 from asap.compatibility import wraps as wraps_dec
11
12from asap.env import is_casapy
13from asap._asap import Scantable
14from asap._asap import filler, msfiller
15from asap.parameters import rcParams
16from asap.logging import asaplog, asaplog_post_dec
17from asap.selector import selector
18from asap.linecatalog import linecatalog
19from asap.coordinate import coordinate
20from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
21from asap.asapfitter import fitter
22
def preserve_selection(func):
    """Decorator that restores the object's original selection after
    *func* has run, even if it raises an exception.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kw):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kw)
        finally:
            # always put the caller's selection back
            obj.set_selection(saved)
        return result
    return wrapper
33
def is_scantable(filename):
    """Is the given file a scantable?

    A directory is accepted when the first line of its 'table.info'
    explicitly reads 'Type = Scantable', or when it names no known
    non-scantable type (MeasurementSet/Image/ApplyTable) -- older
    scantables did not record an explicit type.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # close the handle even if readline() raises
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    match_pattern = '^Type = Scantable *$'
    unmatch_pattern = '^Type = (MeasurementSet|Image|ApplyTable) *$'
    if re.match(match_pattern, l):
        return True
    # no explicit Scantable type: accept unless it is a known other type
    return re.match(unmatch_pattern, l) is None
64
def is_ms(filename):
    """Is the given file a MeasurementSet?

    A directory qualifies when its 'table.info' first line mentions
    'Measurement Set'.

    Parameters:

        filename: the name of the file/directory to test

    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # close the handle even if readline() raises
    f = open(filename+'/table.info')
    try:
        l = f.readline()
    finally:
        f.close()
    return l.find('Measurement Set') != -1
85
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation ---------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # raise as a call (works on Python 2 and 3; the old
                # 'raise E, msg' form with an in-string line continuation
                # also embedded a run of spaces in the message)
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)) and len(edgepar) != 2:
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise scalars and short lists -------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # 'commonedge' == the same pair applies to every IF
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # flatten per-IF entries; a bare int stands for a symmetric pair
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
170
def raise_fitting_failure_exception(e):
    """Report a failed fit: push it to the log when 'verbose' is set,
    otherwise re-raise as a RuntimeError carrying the original text."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
178
def pack_progress_params(showprogress, minnrow):
    """Encode the progress options as the 'true,1000'-style string
    expected by the C++ layer."""
    return '%s,%s' % (str(showprogress).lower(), minnrow)
181
182class scantable(Scantable):
183 """\
184 The ASAP container for scans (single-dish data).
185 """
186
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        # 'insitu' decides whether stmath operates in place or on copies
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": wrap an existing low-level Scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing scantable, on disk or in memory
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific options
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits, let the filler decide
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list of file names: fill them all into one scantable
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
263
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format 'ASCII'
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                         * 'ASAP' (save as ASAP [aips++] Table),
                         * 'SDFITS' (save as SDFITS file)
                         * 'ASCII' (saves as ascii text file)
                         * 'MS2' (saves as an casacore MeasurementSet V2)
                         * 'FITS' (save as image FITS - not readable by
                           class)
                         * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the rc-file default format when none is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        elif format2 == 'MS2':
            # MS2 output goes through the dedicated MS writer
            msopt = {'ms': {'overwrite': overwrite } }
            from asap._asap import mswriter
            writer = mswriter( self )
            writer.write( name, msopt )
        else:
            # every other format is handled by the generic table writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
322
323 def copy(self):
324 """Return a copy of this scantable.
325
326 *Note*:
327
328 This makes a full (deep) copy. scan2 = scan1 makes a reference.
329
330 Example::
331
332 copiedscan = scan.copy()
333
334 """
335 sd = scantable(Scantable._copy(self))
336 return sd
337
338 def drop_scan(self, scanid=None):
339 """\
340 Return a new scantable where the specified scan number(s) has(have)
341 been dropped.
342
343 Parameters:
344
345 scanid: a (list of) scan number(s)
346
347 """
348 from asap import _is_sequence_or_number as _is_valid
349 from asap import _to_list
350 from asap import unique
351 if not _is_valid(scanid):
352 raise RuntimeError( 'Please specify a scanno to drop from the'
353 ' scantable' )
354 scanid = _to_list(scanid)
355 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
356 for sid in scanid: allscans.remove(sid)
357 if len(allscans) == 0:
358 raise ValueError("Can't remove all scans")
359 sel = selector(scans=allscans)
360 return self._select_copy(sel)
361
362 def _select_copy(self, selection):
363 orig = self.get_selection()
364 self.set_selection(orig+selection)
365 cp = self.copy()
366 self.set_selection(orig)
367 return cp
368
369 def get_scan(self, scanid=None):
370 """\
371 Return a specific scan (by scanno) or collection of scans (by
372 source name) in a new scantable.
373
374 *Note*:
375
376 See scantable.drop_scan() for the inverse operation.
377
378 Parameters:
379
380 scanid: a (list of) scanno or a source name, unix-style
381 patterns are accepted for source name matching, e.g.
382 '*_R' gets all 'ref scans
383
384 Example::
385
386 # get all scans containing the source '323p459'
387 newscan = scan.get_scan('323p459')
388 # get all 'off' scans
389 refscans = scan.get_scan('*_R')
390 # get a susbset of scans by scanno (as listed in scan.summary())
391 newscan = scan.get_scan([0, 2, 7, 10])
392
393 """
394 if scanid is None:
395 raise RuntimeError( 'Please specify a scan no or name to '
396 'retrieve from the scantable' )
397 try:
398 bsel = self.get_selection()
399 sel = selector()
400 if type(scanid) is str:
401 sel.set_name(scanid)
402 return self._select_copy(sel)
403 elif type(scanid) is int:
404 sel.set_scans([scanid])
405 return self._select_copy(sel)
406 elif type(scanid) is list:
407 sel.set_scans(scanid)
408 return self._select_copy(sel)
409 else:
410 msg = "Illegal scanid type, use 'int' or 'list' if ints."
411 raise TypeError(msg)
412 except RuntimeError:
413 raise
414
    def __str__(self):
        # Summarise via a temporary file because Scantable._summary
        # writes its output to a named file rather than returning it.
        tempFile = tempfile.NamedTemporaryFile()
        Scantable._summary(self, tempFile.name)
        tempFile.seek(0)
        # discard log messages produced while summarising
        asaplog.clear()
        return tempFile.file.read()
421
422 @asaplog_post_dec
423 def summary(self, filename=None):
424 """\
425 Print a summary of the contents of this scantable.
426
427 Parameters:
428
429 filename: the name of a file to write the putput to
430 Default - no file output
431
432 """
433 if filename is not None:
434 if filename is "":
435 filename = 'scantable_summary.txt'
436 from os.path import expandvars, isdir
437 filename = expandvars(filename)
438 if isdir(filename):
439 msg = "Illegal file name '%s'." % (filename)
440 raise IOError(msg)
441 else:
442 filename = ""
443 Scantable._summary(self, filename)
444
445 def get_spectrum(self, rowno):
446 """Return the spectrum for the current row in the scantable as a list.
447
448 Parameters:
449
450 rowno: the row number to retrieve the spectrum from
451
452 """
453 return self._getspectrum(rowno)
454
455 def get_mask(self, rowno):
456 """Return the mask for the current row in the scantable as a list.
457
458 Parameters:
459
460 rowno: the row number to retrieve the mask from
461
462 """
463 return self._getmask(rowno)
464
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the current row in the scantable.

        Parameters:

            spec:    the new spectrum

            rowno:    the row number to set the spectrum for

        """
        # NOTE(review): length validation uses 'assert', which is
        # skipped under 'python -O'; consider an explicit check.
        assert(len(spec) == self.nchan(self.getif(rowno)))
        return self._setspectrum(spec, rowno)
477
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the low-level coordinate in the python-side convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
500
501 def get_selection(self):
502 """\
503 Get the selection object currently set on this scantable.
504
505 Example::
506
507 sel = scan.get_selection()
508 sel.set_ifs(0) # select IF 0
509 scan.set_selection(sel) # apply modified selection
510
511 """
512 return selector(self._getselection())
513
514 def set_selection(self, selection=None, **kw):
515 """\
516 Select a subset of the data. All following operations on this scantable
517 are only applied to thi selection.
518
519 Parameters:
520
521 selection: a selector object (default unset the selection), or
522 any combination of 'pols', 'ifs', 'beams', 'scans',
523 'cycles', 'name', 'query'
524
525 Examples::
526
527 sel = selector() # create a selection object
528 self.set_scans([0, 3]) # select SCANNO 0 and 3
529 scan.set_selection(sel) # set the selection
530 scan.summary() # will only print summary of scanno 0 an 3
531 scan.set_selection() # unset the selection
532 # or the equivalent
533 scan.set_selection(scans=[0,3])
534 scan.summary() # will only print summary of scanno 0 an 3
535 scan.set_selection() # unset the selection
536
537 """
538 if selection is None:
539 # reset
540 if len(kw) == 0:
541 selection = selector()
542 else:
543 # try keywords
544 for k in kw:
545 if k not in selector.fields:
546 raise KeyError("Invalid selection key '%s', "
547 "valid keys are %s" % (k,
548 selector.fields))
549 selection = selector(**kw)
550 self._setselection(selection)
551
552 def get_row(self, row=0, insitu=None):
553 """\
554 Select a row in the scantable.
555 Return a scantable with single row.
556
557 Parameters:
558
559 row: row no of integration, default is 0.
560 insitu: if False a new scantable is returned. Otherwise, the
561 scaling is done in-situ. The default is taken from .asaprc
562 (False)
563
564 """
565 if insitu is None:
566 insitu = rcParams['insitu']
567 if not insitu:
568 workscan = self.copy()
569 else:
570 workscan = self
571 # Select a row
572 sel = selector()
573 sel.set_rows([row])
574 workscan.set_selection(sel)
575 if not workscan.nrow() == 1:
576 msg = "Could not identify single row. %d rows selected." \
577 % (workscan.nrow())
578 raise RuntimeError(msg)
579 if insitu:
580 self._assign(workscan)
581 else:
582 return workscan
583
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' variants report the abscissa (channel/freq/velocity) of
        # the extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # channel of the extremum, needed for the per-row annotation
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report, one section per processed row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abscissa requested: collect it as the return value
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out += sep+"\n"

        # Push the report through a temp file so casapy's logger shows it
        # verbatim. NOTE: os.environ.has_key and the 'commands' module are
        # Python-2-only constructs.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
691
692 def chan2data(self, rowno=0, chan=0):
693 """\
694 Returns channel/frequency/velocity and spectral value
695 at an arbitrary row and channel in the scantable.
696
697 Parameters:
698
699 rowno: a row number in the scantable. Default is the
700 first row, i.e. rowno=0
701
702 chan: a channel in the scantable. Default is the first
703 channel, i.e. pos=0
704
705 """
706 if isinstance(rowno, int) and isinstance(chan, int):
707 qx = {'unit': self.get_unit(),
708 'value': self._getabcissa(rowno)[chan]}
709 qy = {'unit': self.get_fluxunit(),
710 'value': self._getspectrum(rowno)[chan]}
711 return qx, qy
712
713 def stddev(self, mask=None):
714 """\
715 Determine the standard deviation of the current beam/if/pol
716 Takes a 'mask' as an optional parameter to specify which
717 channels should be excluded.
718
719 Parameters:
720
721 mask: an optional mask specifying where the standard
722 deviation should be determined.
723
724 Example::
725
726 scan.set_unit('channel')
727 msk = scan.create_mask([100, 200], [500, 600])
728 scan.stddev(mask=m)
729
730 """
731 return self.stats(stat='stddev', mask=mask);
732
733
734 def get_column_names(self):
735 """\
736 Return a list of column names, which can be used for selection.
737 """
738 return list(Scantable.get_column_names(self))
739
740 def get_tsys(self, row=-1):
741 """\
742 Return the System temperatures.
743
744 Parameters:
745
746 row: the rowno to get the information for. (default all rows)
747
748 Returns:
749
750 a list of Tsys values for the current selection
751
752 """
753 if row > -1:
754 return self._get_column(self._gettsys, row)
755 return self._row_callback(self._gettsys, "Tsys")
756
757 def get_tsysspectrum(self, row=-1):
758 """\
759 Return the channel dependent system temperatures.
760
761 Parameters:
762
763 row: the rowno to get the information for. (default all rows)
764
765 Returns:
766
767 a list of Tsys values for the current selection
768
769 """
770 return self._get_column( self._gettsysspectrum, row )
771
772 def get_weather(self, row=-1):
773 """\
774 Return the weather informations.
775
776 Parameters:
777
778 row: the rowno to get the information for. (default all rows)
779
780 Returns:
781
782 a dict or list of of dicts of values for the current selection
783
784 """
785
786 values = self._get_column(self._get_weather, row)
787 if row > -1:
788 return {'temperature': values[0],
789 'pressure': values[1], 'humidity' : values[2],
790 'windspeed' : values[3], 'windaz' : values[4]
791 }
792 else:
793 out = []
794 for r in values:
795
796 out.append({'temperature': r[0],
797 'pressure': r[1], 'humidity' : r[2],
798 'windspeed' : r[3], 'windaz' : r[4]
799 })
800 return out
801
    def _row_callback(self, callback, label):
        # Apply callback(row) to every row, log a formatted per-row
        # report headed by *label*, and return the collected values.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
825
826 def _get_column(self, callback, row=-1, *args):
827 """
828 """
829 if row == -1:
830 return [callback(i, *args) for i in range(self.nrow())]
831 else:
832 if 0 <= row < self.nrow():
833 return callback(row, *args)
834
835
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # trim the parse format (.%f, then %S, then %M) to match the
            # precision of the returned strings
            # NOTE(review): relies on Python 2 integer division in (6-prec)/2
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
881
882
883 def get_inttime(self, row=-1):
884 """\
885 Get a list of integration times for the observations.
886 Return a time in seconds for each integration in the scantable.
887
888 Parameters:
889
890 row: row no of integration. Default -1 return all rows.
891
892 """
893 return self._get_column(self._getinttime, row)
894
895
896 def get_sourcename(self, row=-1):
897 """\
898 Get a list source names for the observations.
899 Return a string for each integration in the scantable.
900 Parameters:
901
902 row: row no of integration. Default -1 return all rows.
903
904 """
905 return self._get_column(self._getsourcename, row)
906
907 def get_elevation(self, row=-1):
908 """\
909 Get a list of elevations for the observations.
910 Return a float for each integration in the scantable.
911
912 Parameters:
913
914 row: row no of integration. Default -1 return all rows.
915
916 """
917 return self._get_column(self._getelevation, row)
918
919 def get_azimuth(self, row=-1):
920 """\
921 Get a list of azimuths for the observations.
922 Return a float for each integration in the scantable.
923
924 Parameters:
925 row: row no of integration. Default -1 return all rows.
926
927 """
928 return self._get_column(self._getazimuth, row)
929
930 def get_parangle(self, row=-1):
931 """\
932 Get a list of parallactic angles for the observations.
933 Return a float for each integration in the scantable.
934
935 Parameters:
936
937 row: row no of integration. Default -1 return all rows.
938
939 """
940 return self._get_column(self._getparangle, row)
941
942 def get_direction(self, row=-1):
943 """
944 Get a list of Positions on the sky (direction) for the observations.
945 Return a string for each integration in the scantable.
946
947 Parameters:
948
949 row: row no of integration. Default -1 return all rows
950
951 """
952 return self._get_column(self._getdirection, row)
953
954 def get_directionval(self, row=-1):
955 """\
956 Get a list of Positions on the sky (direction) for the observations.
957 Return a float for each integration in the scantable.
958
959 Parameters:
960
961 row: row no of integration. Default -1 return all rows
962
963 """
964 return self._get_column(self._getdirectionvec, row)
965
966 @asaplog_post_dec
967 def set_unit(self, unit='channel'):
968 """\
969 Set the unit for all following operations on this scantable
970
971 Parameters:
972
973 unit: optional unit, default is 'channel'. Use one of '*Hz',
974 'km/s', 'channel' or equivalent ''
975
976 """
977 varlist = vars()
978 if unit in ['', 'pixel', 'channel']:
979 unit = ''
980 inf = list(self._getcoordinfo())
981 inf[0] = unit
982 self._setcoordinfo(inf)
983 self._add_history("set_unit", varlist)
984
985 @asaplog_post_dec
986 def set_instrument(self, instr):
987 """\
988 Set the instrument for subsequent processing.
989
990 Parameters:
991
992 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
993 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
994
995 """
996 self._setInstrument(instr)
997 self._add_history("set_instument", vars())
998
999 @asaplog_post_dec
1000 def set_feedtype(self, feedtype):
1001 """\
1002 Overwrite the feed type, which might not be set correctly.
1003
1004 Parameters:
1005
1006 feedtype: 'linear' or 'circular'
1007
1008 """
1009 self._setfeedtype(feedtype)
1010 self._add_history("set_feedtype", vars())
1011
1012 @asaplog_post_dec
1013 def set_doppler(self, doppler='RADIO'):
1014 """\
1015 Set the doppler for all following operations on this scantable.
1016
1017 Parameters:
1018
1019 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1020
1021 """
1022 varlist = vars()
1023 inf = list(self._getcoordinfo())
1024 inf[2] = doppler
1025 self._setcoordinfo(inf)
1026 self._add_history("set_doppler", vars())
1027
1028 @asaplog_post_dec
1029 def set_freqframe(self, frame=None):
1030 """\
1031 Set the frame type of the Spectral Axis.
1032
1033 Parameters:
1034
1035 frame: an optional frame type, default 'LSRK'. Valid frames are:
1036 'TOPO', 'LSRD', 'LSRK', 'BARY',
1037 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1038
1039 Example::
1040
1041 scan.set_freqframe('BARY')
1042
1043 """
1044 frame = frame or rcParams['scantable.freqframe']
1045 varlist = vars()
1046 # "REST" is not implemented in casacore
1047 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1048 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1049 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1050 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051
1052 if frame in valid:
1053 inf = list(self._getcoordinfo())
1054 inf[1] = frame
1055 self._setcoordinfo(inf)
1056 self._add_history("set_freqframe", varlist)
1057 else:
1058 msg = "Please specify a valid freq type. Valid types are:\n", valid
1059 raise TypeError(msg)
1060
1061 @asaplog_post_dec
1062 def set_dirframe(self, frame=""):
1063 """\
1064 Set the frame type of the Direction on the sky.
1065
1066 Parameters:
1067
1068 frame: an optional frame type, default ''. Valid frames are:
1069 'J2000', 'B1950', 'GALACTIC'
1070
1071 Example:
1072
1073 scan.set_dirframe('GALACTIC')
1074
1075 """
1076 varlist = vars()
1077 Scantable.set_dirframe(self, frame)
1078 self._add_history("set_dirframe", varlist)
1079
1080 def get_unit(self):
1081 """\
1082 Get the default unit set in this scantable
1083
1084 Returns:
1085
1086 A unit string
1087
1088 """
1089 inf = self._getcoordinfo()
1090 unit = inf[0]
1091 if unit == '': unit = 'channel'
1092 return unit
1093
1094 @asaplog_post_dec
1095 def get_abcissa(self, rowno=0):
1096 """\
1097 Get the abcissa in the current coordinate setup for the currently
1098 selected Beam/IF/Pol
1099
1100 Parameters:
1101
1102 rowno: an optional row number in the scantable. Default is the
1103 first row, i.e. rowno=0
1104
1105 Returns:
1106
1107 The abcissa values and the format string (as a dictionary)
1108
1109 """
1110 abc = self._getabcissa(rowno)
1111 lbl = self._getabcissalabel(rowno)
1112 return abc, lbl
1113
1114 @asaplog_post_dec
1115 def flag(self, mask=None, unflag=False, row=-1):
1116 """\
1117 Flag the selected data using an optional channel mask.
1118
1119 Parameters:
1120
1121 mask: an optional channel mask, created with create_mask. Default
1122 (no mask) is all channels.
1123
1124 unflag: if True, unflag the data
1125
1126 row: an optional row number in the scantable.
1127 Default -1 flags all rows
1128
1129 """
1130 varlist = vars()
1131 mask = mask or []
1132 self._flag(row, mask, unflag)
1133 self._add_history("flag", varlist)
1134
1135 @asaplog_post_dec
1136 def flag_row(self, rows=None, unflag=False):
1137 """\
1138 Flag the selected data in row-based manner.
1139
1140 Parameters:
1141
1142 rows: list of row numbers to be flagged. Default is no row
1143 (must be explicitly specified to execute row-based
1144 flagging).
1145
1146 unflag: if True, unflag the data.
1147
1148 """
1149 varlist = vars()
1150 if rows is None:
1151 rows = []
1152 self._flag_row(rows, unflag)
1153 self._add_history("flag_row", varlist)
1154
1155 @asaplog_post_dec
1156 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1157 """\
1158 Flag the selected data outside a specified range (in channel-base)
1159
1160 Parameters:
1161
1162 uthres: upper threshold.
1163
1164 dthres: lower threshold
1165
1166 clipoutside: True for flagging data outside the range
1167 [dthres:uthres].
1168 False for flagging data inside the range.
1169
1170 unflag: if True, unflag the data.
1171
1172 """
1173 varlist = vars()
1174 self._clip(uthres, dthres, clipoutside, unflag)
1175 self._add_history("clip", varlist)
1176
1177 @asaplog_post_dec
1178 def lag_flag(self, start, end, unit="MHz", insitu=None):
1179 """\
1180 Flag the data in 'lag' space by providing a frequency to remove.
1181 Flagged data in the scantable get interpolated over the region.
1182 No taper is applied.
1183
1184 Parameters:
1185
1186 start: the start frequency (really a period within the
1187 bandwidth) or period to remove
1188
1189 end: the end frequency or period to remove
1190
1191 unit: the frequency unit (default 'MHz') or '' for
1192 explicit lag channels
1193
1194 *Notes*:
1195
1196 It is recommended to flag edges of the band or strong
1197 signals beforehand.
1198
1199 """
1200 if insitu is None: insitu = rcParams['insitu']
1201 self._math._setinsitu(insitu)
1202 varlist = vars()
1203 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1204 if not (unit == "" or base.has_key(unit)):
1205 raise ValueError("%s is not a valid unit." % unit)
1206 if unit == "":
1207 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1208 else:
1209 s = scantable(self._math._lag_flag(self, start*base[unit],
1210 end*base[unit], "frequency"))
1211 s._add_history("lag_flag", varlist)
1212 if insitu:
1213 self._assign(s)
1214 else:
1215 return s
1216
1217 @asaplog_post_dec
1218 def fft(self, rowno=None, mask=None, getrealimag=False):
1219 """\
1220 Apply FFT to the spectra.
1221 Flagged data in the scantable get interpolated over the region.
1222
1223 Parameters:
1224
1225 rowno: The row number(s) to be processed. int, list
1226 and tuple are accepted. By default (None), FFT
1227 is applied to the whole data.
1228
1229 mask: Auxiliary channel mask(s). Given as a boolean
1230 list, it is applied to all specified rows.
1231 A list of boolean lists can also be used to
1232 apply different masks. In the latter case, the
1233 length of 'mask' must be the same as that of
1234 'rowno'. The default is None.
1235
1236 getrealimag: If True, returns the real and imaginary part
1237 values of the complex results.
1238 If False (the default), returns the amplitude
1239 (absolute value) normalised with Ndata/2 and
1240 phase (argument, in unit of radian).
1241
1242 Returns:
1243
1244 A list of dictionaries containing the results for each spectrum.
1245 Each dictionary contains two values, the real and the imaginary
1246 parts when getrealimag = True, or the amplitude(absolute value)
1247 and the phase(argument) when getrealimag = False. The key for
1248 these values are 'real' and 'imag', or 'ampl' and 'phase',
1249 respectively.
1250 """
1251 if rowno is None:
1252 rowno = []
1253 if isinstance(rowno, int):
1254 rowno = [rowno]
1255 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1256 raise TypeError("The row number(s) must be int, list or tuple.")
1257 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1258
1259 usecommonmask = True
1260
1261 if mask is None:
1262 mask = []
1263 if isinstance(mask, list) or isinstance(mask, tuple):
1264 if len(mask) == 0:
1265 mask = [[]]
1266 else:
1267 if isinstance(mask[0], bool):
1268 if len(mask) != self.nchan(self.getif(rowno[0])):
1269 raise ValueError("The spectra and the mask have "
1270 "different length.")
1271 mask = [mask]
1272 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1273 usecommonmask = False
1274 if len(mask) != len(rowno):
1275 raise ValueError("When specifying masks for each "
1276 "spectrum, the numbers of them "
1277 "must be identical.")
1278 for i in xrange(mask):
1279 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1280 raise ValueError("The spectra and the mask have "
1281 "different length.")
1282 else:
1283 raise TypeError("The mask must be a boolean list or "
1284 "a list of boolean list.")
1285 else:
1286 raise TypeError("The mask must be a boolean list or a list of "
1287 "boolean list.")
1288
1289 res = []
1290
1291 imask = 0
1292 for whichrow in rowno:
1293 fspec = self._fft(whichrow, mask[imask], getrealimag)
1294 nspec = len(fspec)
1295
1296 i = 0
1297 v1 = []
1298 v2 = []
1299 reselem = {"real":[],"imag":[]} if getrealimag \
1300 else {"ampl":[],"phase":[]}
1301
1302 while (i < nspec):
1303 v1.append(fspec[i])
1304 v2.append(fspec[i+1])
1305 i += 2
1306
1307 if getrealimag:
1308 reselem["real"] += v1
1309 reselem["imag"] += v2
1310 else:
1311 reselem["ampl"] += v1
1312 reselem["phase"] += v2
1313
1314 res.append(reselem)
1315
1316 if not usecommonmask:
1317 imask += 1
1318
1319 return res
1320
1321 @asaplog_post_dec
1322 def create_mask(self, *args, **kwargs):
1323 """\
1324 Compute and return a mask based on [min, max] windows.
1325 The specified windows are to be INCLUDED, when the mask is
1326 applied.
1327
1328 Parameters:
1329
1330 [min, max], [min2, max2], ...
1331 Pairs of start/end points (inclusive)specifying the regions
1332 to be masked
1333
1334 invert: optional argument. If specified as True,
1335 return an inverted mask, i.e. the regions
1336 specified are EXCLUDED
1337
1338 row: create the mask using the specified row for
1339 unit conversions, default is row=0
1340 only necessary if frequency varies over rows.
1341
1342 Examples::
1343
1344 scan.set_unit('channel')
1345 # a)
1346 msk = scan.create_mask([400, 500], [800, 900])
1347 # masks everything outside 400 and 500
1348 # and 800 and 900 in the unit 'channel'
1349
1350 # b)
1351 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1352 # masks the regions between 400 and 500
1353 # and 800 and 900 in the unit 'channel'
1354
1355 # c)
1356 #mask only channel 400
1357 msk = scan.create_mask([400])
1358
1359 """
1360 row = kwargs.get("row", 0)
1361 data = self._getabcissa(row)
1362 u = self._getcoordinfo()[0]
1363 if u == "":
1364 u = "channel"
1365 msg = "The current mask window unit is %s" % u
1366 i = self._check_ifs()
1367 if not i:
1368 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1369 asaplog.push(msg)
1370 n = len(data)
1371 msk = _n_bools(n, False)
1372 # test if args is a 'list' or a 'normal *args - UGLY!!!
1373
1374 ws = (isinstance(args[-1][-1], int)
1375 or isinstance(args[-1][-1], float)) and args or args[0]
1376 for window in ws:
1377 if len(window) == 1:
1378 window = [window[0], window[0]]
1379 if len(window) == 0 or len(window) > 2:
1380 raise ValueError("A window needs to be defined as "
1381 "[start(, end)]")
1382 if window[0] > window[1]:
1383 tmp = window[0]
1384 window[0] = window[1]
1385 window[1] = tmp
1386 for i in range(n):
1387 if data[i] >= window[0] and data[i] <= window[1]:
1388 msk[i] = True
1389 if kwargs.has_key('invert'):
1390 if kwargs.get('invert'):
1391 msk = mask_not(msk)
1392 return msk
1393
1394 def get_masklist(self, mask=None, row=0, silent=False):
1395 """\
1396 Compute and return a list of mask windows, [min, max].
1397
1398 Parameters:
1399
1400 mask: channel mask, created with create_mask.
1401
1402 row: calcutate the masklist using the specified row
1403 for unit conversions, default is row=0
1404 only necessary if frequency varies over rows.
1405
1406 Returns:
1407
1408 [min, max], [min2, max2], ...
1409 Pairs of start/end points (inclusive)specifying
1410 the masked regions
1411
1412 """
1413 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1414 raise TypeError("The mask should be list or tuple.")
1415 if len(mask) <= 0:
1416 raise TypeError("The mask elements should be > 0")
1417 data = self._getabcissa(row)
1418 if len(data) != len(mask):
1419 msg = "Number of channels in scantable != number of mask elements"
1420 raise TypeError(msg)
1421 u = self._getcoordinfo()[0]
1422 if u == "":
1423 u = "channel"
1424 msg = "The current mask window unit is %s" % u
1425 i = self._check_ifs()
1426 if not i:
1427 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1428 if not silent:
1429 asaplog.push(msg)
1430 masklist = []
1431 ist, ien = None, None
1432 ist, ien=self.get_mask_indices(mask)
1433 if ist is not None and ien is not None:
1434 for i in xrange(len(ist)):
1435 range=[data[ist[i]],data[ien[i]]]
1436 range.sort()
1437 masklist.append([range[0],range[1]])
1438 return masklist
1439
1440 def get_mask_indices(self, mask=None):
1441 """\
1442 Compute and Return lists of mask start indices and mask end indices.
1443
1444 Parameters:
1445
1446 mask: channel mask, created with create_mask.
1447
1448 Returns:
1449
1450 List of mask start indices and that of mask end indices,
1451 i.e., [istart1,istart2,....], [iend1,iend2,....].
1452
1453 """
1454 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1455 raise TypeError("The mask should be list or tuple.")
1456 if len(mask) <= 0:
1457 raise TypeError("The mask elements should be > 0")
1458 istart = []
1459 iend = []
1460 if mask[0]:
1461 istart.append(0)
1462 for i in range(len(mask)-1):
1463 if not mask[i] and mask[i+1]:
1464 istart.append(i+1)
1465 elif mask[i] and not mask[i+1]:
1466 iend.append(i)
1467 if mask[len(mask)-1]:
1468 iend.append(len(mask)-1)
1469 if len(istart) != len(iend):
1470 raise RuntimeError("Numbers of mask start != mask end.")
1471 for i in range(len(istart)):
1472 if istart[i] > iend[i]:
1473 raise RuntimeError("Mask start index > mask end index")
1474 break
1475 return istart,iend
1476
1477 @asaplog_post_dec
1478 def parse_maskexpr(self, maskstring):
1479 """
1480 Parse CASA type mask selection syntax (IF dependent).
1481
1482 Parameters:
1483 maskstring : A string mask selection expression.
1484 A comma separated selections mean different IF -
1485 channel combinations. IFs and channel selections
1486 are partitioned by a colon, ':'.
1487 examples:
1488 '' = all IFs (all channels)
1489 '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
1490 '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1491 '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1492 all channels in IF8
1493 Returns:
1494 A dictionary of selected (valid) IF and masklist pairs,
1495 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1496 """
1497 if not isinstance(maskstring,str):
1498 asaplog.post()
1499 asaplog.push("Mask expression should be a string.")
1500 asaplog.post("ERROR")
1501
1502 valid_ifs = self.getifnos()
1503 frequnit = self.get_unit()
1504 seldict = {}
1505 if maskstring == "":
1506 maskstring = str(valid_ifs)[1:-1]
1507 ## split each selection "IF range[:CHAN range]"
1508 sellist = maskstring.split(',')
1509 for currselstr in sellist:
1510 selset = currselstr.split(':')
1511 # spw and mask string (may include ~, < or >)
1512 spwmasklist = self._parse_selection(selset[0], typestr='integer',
1513 minval=min(valid_ifs),
1514 maxval=max(valid_ifs))
1515 for spwlist in spwmasklist:
1516 selspws = []
1517 for ispw in range(spwlist[0],spwlist[1]+1):
1518 # Put into the list only if ispw exists
1519 if valid_ifs.count(ispw):
1520 selspws.append(ispw)
1521 del spwmasklist, spwlist
1522
1523 # parse frequency mask list
1524 if len(selset) > 1:
1525 freqmasklist = self._parse_selection(selset[1], typestr='float',
1526 offset=0.)
1527 else:
1528 # want to select the whole spectrum
1529 freqmasklist = [None]
1530
1531 ## define a dictionary of spw - masklist combination
1532 for ispw in selspws:
1533 #print "working on", ispw
1534 spwstr = str(ispw)
1535 if len(selspws) == 0:
1536 # empty spw
1537 continue
1538 else:
1539 ## want to get min and max of the spw and
1540 ## offset to set for '<' and '>'
1541 if frequnit == 'channel':
1542 minfreq = 0
1543 maxfreq = self.nchan(ifno=ispw)
1544 offset = 0.5
1545 else:
1546 ## This is ugly part. need improvement
1547 for ifrow in xrange(self.nrow()):
1548 if self.getif(ifrow) == ispw:
1549 #print "IF",ispw,"found in row =",ifrow
1550 break
1551 freqcoord = self.get_coordinate(ifrow)
1552 freqs = self._getabcissa(ifrow)
1553 minfreq = min(freqs)
1554 maxfreq = max(freqs)
1555 if len(freqs) == 1:
1556 offset = 0.5
1557 elif frequnit.find('Hz') > 0:
1558 offset = abs(freqcoord.to_frequency(1,
1559 unit=frequnit)
1560 -freqcoord.to_frequency(0,
1561 unit=frequnit)
1562 )*0.5
1563 elif frequnit.find('m/s') > 0:
1564 offset = abs(freqcoord.to_velocity(1,
1565 unit=frequnit)
1566 -freqcoord.to_velocity(0,
1567 unit=frequnit)
1568 )*0.5
1569 else:
1570 asaplog.post()
1571 asaplog.push("Invalid frequency unit")
1572 asaplog.post("ERROR")
1573 del freqs, freqcoord, ifrow
1574 for freq in freqmasklist:
1575 selmask = freq or [minfreq, maxfreq]
1576 if selmask[0] == None:
1577 ## selection was "<freq[1]".
1578 if selmask[1] < minfreq:
1579 ## avoid adding region selection
1580 selmask = None
1581 else:
1582 selmask = [minfreq,selmask[1]-offset]
1583 elif selmask[1] == None:
1584 ## selection was ">freq[0]"
1585 if selmask[0] > maxfreq:
1586 ## avoid adding region selection
1587 selmask = None
1588 else:
1589 selmask = [selmask[0]+offset,maxfreq]
1590 if selmask:
1591 if not seldict.has_key(spwstr):
1592 # new spw selection
1593 seldict[spwstr] = []
1594 seldict[spwstr] += [selmask]
1595 del minfreq,maxfreq,offset,freq,selmask
1596 del spwstr
1597 del freqmasklist
1598 del valid_ifs
1599 if len(seldict) == 0:
1600 asaplog.post()
1601 asaplog.push("No valid selection in the mask expression: "
1602 +maskstring)
1603 asaplog.post("WARN")
1604 return None
1605 msg = "Selected masklist:\n"
1606 for sif, lmask in seldict.iteritems():
1607 msg += " IF"+sif+" - "+str(lmask)+"\n"
1608 asaplog.push(msg)
1609 return seldict
1610
1611 @asaplog_post_dec
1612 def parse_idx_selection(self, mode, selexpr):
1613 """
1614 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1615 BEAMNO, and row number
1616
1617 Parameters:
1618 mode : which column to select.
1619 ['scan',|'if'|'pol'|'beam'|'row']
1620 selexpr : A comma separated selection expression.
1621 examples:
1622 '' = all (returns [])
1623 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1624 (returns [0,1,4,5,6,9])
1625 Returns:
1626 A List of selected indices
1627 """
1628 if selexpr == "":
1629 return []
1630 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1631 'b': 'beam', 'r': 'row'}
1632 smode = mode.lower()[0]
1633 if not (smode in valid_modes.keys()):
1634 msg = "Invalid mode '%s'. Valid modes are %s" %\
1635 (mode, str(valid_modes.values()))
1636 asaplog.post()
1637 asaplog.push(msg)
1638 asaplog.post("ERROR")
1639 mode = valid_modes[smode]
1640 minidx = None
1641 maxidx = None
1642 if smode == 'r':
1643 minidx = 0
1644 maxidx = self.nrow()
1645 else:
1646 idx = getattr(self,"get"+mode+"nos")()
1647 minidx = min(idx)
1648 maxidx = max(idx)
1649 del idx
1650 sellist = selexpr.split(',')
1651 idxlist = []
1652 for currselstr in sellist:
1653 # single range (may include ~, < or >)
1654 currlist = self._parse_selection(currselstr, typestr='integer',
1655 minval=minidx,maxval=maxidx)
1656 for thelist in currlist:
1657 idxlist += range(thelist[0],thelist[1]+1)
1658 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1659 asaplog.push(msg)
1660 return idxlist
1661
1662 def _parse_selection(self, selstr, typestr='float', offset=0.,
1663 minval=None, maxval=None):
1664 """
1665 Parameters:
1666 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1667 typestr : The type of the values in returned list
1668 ('integer' or 'float')
1669 offset : The offset value to subtract from or add to
1670 the boundary value if the selection string
1671 includes '<' or '>' [Valid only for typestr='float']
1672 minval, maxval : The minimum/maximum values to set if the
1673 selection string includes '<' or '>'.
1674 The list element is filled with None by default.
1675 Returns:
1676 A list of min/max pair of selections.
1677 Example:
1678 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1679 --> returns [[0,2],[5,7],[9,9]]
1680 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1681 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
1682 """
1683 selgroups = selstr.split(';')
1684 sellists = []
1685 if typestr.lower().startswith('int'):
1686 formatfunc = int
1687 offset = 1
1688 else:
1689 formatfunc = float
1690
1691 for currsel in selgroups:
1692 if currsel.find('~') > 0:
1693 # val0 <= x <= val1
1694 minsel = formatfunc(currsel.split('~')[0].strip())
1695 maxsel = formatfunc(currsel.split('~')[1].strip())
1696 elif currsel.strip().find('<=') > -1:
1697 bound = currsel.split('<=')
1698 try: # try "x <= val"
1699 minsel = minval
1700 maxsel = formatfunc(bound[1].strip())
1701 except ValueError: # now "val <= x"
1702 minsel = formatfunc(bound[0].strip())
1703 maxsel = maxval
1704 elif currsel.strip().find('>=') > -1:
1705 bound = currsel.split('>=')
1706 try: # try "x >= val"
1707 minsel = formatfunc(bound[1].strip())
1708 maxsel = maxval
1709 except ValueError: # now "val >= x"
1710 minsel = minval
1711 maxsel = formatfunc(bound[0].strip())
1712 elif currsel.strip().find('<') > -1:
1713 bound = currsel.split('<')
1714 try: # try "x < val"
1715 minsel = minval
1716 maxsel = formatfunc(bound[1].strip()) \
1717 - formatfunc(offset)
1718 except ValueError: # now "val < x"
1719 minsel = formatfunc(bound[0].strip()) \
1720 + formatfunc(offset)
1721 maxsel = maxval
1722 elif currsel.strip().find('>') > -1:
1723 bound = currsel.split('>')
1724 try: # try "x > val"
1725 minsel = formatfunc(bound[1].strip()) \
1726 + formatfunc(offset)
1727 maxsel = maxval
1728 except ValueError: # now "val > x"
1729 minsel = minval
1730 maxsel = formatfunc(bound[0].strip()) \
1731 - formatfunc(offset)
1732 else:
1733 minsel = formatfunc(currsel)
1734 maxsel = formatfunc(currsel)
1735 sellists.append([minsel,maxsel])
1736 return sellists
1737
1738# def get_restfreqs(self):
1739# """
1740# Get the restfrequency(s) stored in this scantable.
1741# The return value(s) are always of unit 'Hz'
1742# Parameters:
1743# none
1744# Returns:
1745# a list of doubles
1746# """
1747# return list(self._getrestfreqs())
1748
1749 def get_restfreqs(self, ids=None):
1750 """\
1751 Get the restfrequency(s) stored in this scantable.
1752 The return value(s) are always of unit 'Hz'
1753
1754 Parameters:
1755
1756 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1757 be retrieved
1758
1759 Returns:
1760
1761 dictionary containing ids and a list of doubles for each id
1762
1763 """
1764 if ids is None:
1765 rfreqs = {}
1766 idlist = self.getmolnos()
1767 for i in idlist:
1768 rfreqs[i] = list(self._getrestfreqs(i))
1769 return rfreqs
1770 else:
1771 if type(ids) == list or type(ids) == tuple:
1772 rfreqs = {}
1773 for i in ids:
1774 rfreqs[i] = list(self._getrestfreqs(i))
1775 return rfreqs
1776 else:
1777 return list(self._getrestfreqs(ids))
1778
1779 @asaplog_post_dec
1780 def set_restfreqs(self, freqs=None, unit='Hz'):
1781 """\
1782 Set or replace the restfrequency specified and
1783 if the 'freqs' argument holds a scalar,
1784 then that rest frequency will be applied to all the selected
1785 data. If the 'freqs' argument holds
1786 a vector, then it MUST be of equal or smaller length than
1787 the number of IFs (and the available restfrequencies will be
1788 replaced by this vector). In this case, *all* data have
1789 the restfrequency set per IF according
1790 to the corresponding value you give in the 'freqs' vector.
1791 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1792 IF 1 gets restfreq 2e9.
1793
1794 You can also specify the frequencies via a linecatalog.
1795
1796 Parameters:
1797
1798 freqs: list of rest frequency values or string idenitfiers
1799
1800 unit: unit for rest frequency (default 'Hz')
1801
1802
1803 Example::
1804
1805 # set the given restfrequency for the all currently selected IFs
1806 scan.set_restfreqs(freqs=1.4e9)
1807 # set restfrequencies for the n IFs (n > 1) in the order of the
1808 # list, i.e
1809 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1810 # len(list_of_restfreqs) == nIF
1811 # for nIF == 1 the following will set multiple restfrequency for
1812 # that IF
1813 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1814 # set multiple restfrequencies per IF. as a list of lists where
1815 # the outer list has nIF elements, the inner s arbitrary
1816 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1817
1818 *Note*:
1819
1820 To do more sophisticate Restfrequency setting, e.g. on a
1821 source and IF basis, use scantable.set_selection() before using
1822 this function::
1823
1824 # provided your scantable is called scan
1825 selection = selector()
1826 selection.set_name('ORION*')
1827 selection.set_ifs([1])
1828 scan.set_selection(selection)
1829 scan.set_restfreqs(freqs=86.6e9)
1830
1831 """
1832 varlist = vars()
1833 from asap import linecatalog
1834 # simple value
1835 if isinstance(freqs, int) or isinstance(freqs, float):
1836 self._setrestfreqs([freqs], [""], unit)
1837 # list of values
1838 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1839 # list values are scalars
1840 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1841 if len(freqs) == 1:
1842 self._setrestfreqs(freqs, [""], unit)
1843 else:
1844 # allow the 'old' mode of setting mulitple IFs
1845 savesel = self._getselection()
1846 sel = self.get_selection()
1847 iflist = self.getifnos()
1848 if len(freqs)>len(iflist):
1849 raise ValueError("number of elements in list of list "
1850 "exeeds the current IF selections")
1851 iflist = self.getifnos()
1852 for i, fval in enumerate(freqs):
1853 sel.set_ifs(iflist[i])
1854 self._setselection(sel)
1855 self._setrestfreqs([fval], [""], unit)
1856 self._setselection(savesel)
1857
1858 # list values are dict, {'value'=, 'name'=)
1859 elif isinstance(freqs[-1], dict):
1860 values = []
1861 names = []
1862 for d in freqs:
1863 values.append(d["value"])
1864 names.append(d["name"])
1865 self._setrestfreqs(values, names, unit)
1866 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1867 savesel = self._getselection()
1868 sel = self.get_selection()
1869 iflist = self.getifnos()
1870 if len(freqs)>len(iflist):
1871 raise ValueError("number of elements in list of list exeeds"
1872 " the current IF selections")
1873 for i, fval in enumerate(freqs):
1874 sel.set_ifs(iflist[i])
1875 self._setselection(sel)
1876 self._setrestfreqs(fval, [""], unit)
1877 self._setselection(savesel)
1878 # freqs are to be taken from a linecatalog
1879 elif isinstance(freqs, linecatalog):
1880 savesel = self._getselection()
1881 sel = self.get_selection()
1882 for i in xrange(freqs.nrow()):
1883 sel.set_ifs(iflist[i])
1884 self._setselection(sel)
1885 self._setrestfreqs([freqs.get_frequency(i)],
1886 [freqs.get_name(i)], "MHz")
1887 # ensure that we are not iterating past nIF
1888 if i == self.nif()-1: break
1889 self._setselection(savesel)
1890 else:
1891 return
1892 self._add_history("set_restfreqs", varlist)
1893
1894 @asaplog_post_dec
1895 def shift_refpix(self, delta):
1896 """\
1897 Shift the reference pixel of the Spectra Coordinate by an
1898 integer amount.
1899
1900 Parameters:
1901
1902 delta: the amount to shift by
1903
1904 *Note*:
1905
1906 Be careful using this with broadband data.
1907
1908 """
1909 varlist = vars()
1910 Scantable.shift_refpix(self, delta)
1911 s._add_history("shift_refpix", varlist)
1912
1913 @asaplog_post_dec
1914 def history(self, filename=None):
1915 """\
1916 Print the history. Optionally to a file.
1917
1918 Parameters:
1919
1920 filename: The name of the file to save the history to.
1921
1922 """
1923 hist = list(self._gethistory())
1924 out = "-"*80
1925 for h in hist:
1926 if h.startswith("---"):
1927 out = "\n".join([out, h])
1928 else:
1929 items = h.split("##")
1930 date = items[0]
1931 func = items[1]
1932 items = items[2:]
1933 out += "\n"+date+"\n"
1934 out += "Function: %s\n Parameters:" % (func)
1935 for i in items:
1936 if i == '':
1937 continue
1938 s = i.split("=")
1939 out += "\n %s = %s" % (s[0], s[1])
1940 out = "\n".join([out, "-"*80])
1941 if filename is not None:
1942 if filename is "":
1943 filename = 'scantable_history.txt'
1944 import os
1945 filename = os.path.expandvars(os.path.expanduser(filename))
1946 if not os.path.isdir(filename):
1947 data = open(filename, 'w')
1948 data.write(out)
1949 data.close()
1950 else:
1951 msg = "Illegal file name '%s'." % (filename)
1952 raise IOError(msg)
1953 return page(out)
1954
1955 #
1956 # Maths business
1957 #
1958 @asaplog_post_dec
1959 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1960 """\
1961 Return the (time) weighted average of a scan. Scans will be averaged
1962 only if the source direction (RA/DEC) is within 1' otherwise
1963
1964 *Note*:
1965
1966 in channels only - align if necessary
1967
1968 Parameters:
1969
1970 mask: an optional mask (only used for 'var' and 'tsys'
1971 weighting)
1972
1973 scanav: True averages each scan separately
1974 False (default) averages all scans together,
1975
1976 weight: Weighting scheme.
1977 'none' (mean no weight)
1978 'var' (1/var(spec) weighted)
1979 'tsys' (1/Tsys**2 weighted)
1980 'tint' (integration time weighted)
1981 'tintsys' (Tint/Tsys**2)
1982 'median' ( median averaging)
1983 The default is 'tint'
1984
1985 align: align the spectra in velocity before averaging. It takes
1986 the time of the first spectrum as reference time.
1987
1988 Example::
1989
1990 # time average the scantable without using a mask
1991 newscan = scan.average_time()
1992
1993 """
1994 varlist = vars()
1995 weight = weight or 'TINT'
1996 mask = mask or ()
1997 scanav = (scanav and 'SCAN') or 'NONE'
1998 scan = (self, )
1999
2000 if align:
2001 scan = (self.freq_align(insitu=False), )
2002 s = None
2003 if weight.upper() == 'MEDIAN':
2004 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2005 scanav))
2006 else:
2007 s = scantable(self._math._average(scan, mask, weight.upper(),
2008 scanav))
2009 s._add_history("average_time", varlist)
2010 return s
2011
2012 @asaplog_post_dec
2013 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2014 """\
2015 Return a scan where all spectra are converted to either
2016 Jansky or Kelvin depending upon the flux units of the scan table.
2017 By default the function tries to look the values up internally.
2018 If it can't find them (or if you want to over-ride), you must
2019 specify EITHER jyperk OR eta (and D which it will try to look up
2020 also if you don't set it). jyperk takes precedence if you set both.
2021
2022 Parameters:
2023
2024 jyperk: the Jy / K conversion factor
2025
2026 eta: the aperture efficiency
2027
2028 d: the geometric diameter (metres)
2029
2030 insitu: if False a new scantable is returned.
2031 Otherwise, the scaling is done in-situ
2032 The default is taken from .asaprc (False)
2033
2034 """
2035 if insitu is None: insitu = rcParams['insitu']
2036 self._math._setinsitu(insitu)
2037 varlist = vars()
2038 jyperk = jyperk or -1.0
2039 d = d or -1.0
2040 eta = eta or -1.0
2041 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2042 s._add_history("convert_flux", varlist)
2043 if insitu: self._assign(s)
2044 else: return s
2045
2046 @asaplog_post_dec
2047 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2048 """\
2049 Return a scan after applying a gain-elevation correction.
2050 The correction can be made via either a polynomial or a
2051 table-based interpolation (and extrapolation if necessary).
2052 You specify polynomial coefficients, an ascii table or neither.
2053 If you specify neither, then a polynomial correction will be made
2054 with built in coefficients known for certain telescopes (an error
2055 will occur if the instrument is not known).
2056 The data and Tsys are *divided* by the scaling factors.
2057
2058 Parameters:
2059
2060 poly: Polynomial coefficients (default None) to compute a
2061 gain-elevation correction as a function of
2062 elevation (in degrees).
2063
2064 filename: The name of an ascii file holding correction factors.
2065 The first row of the ascii file must give the column
2066 names and these MUST include columns
2067 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2068 by this) somewhere.
2069 The second row must give the data type of the
2070 column. Use 'R' for Real and 'I' for Integer.
2071 An example file would be
2072 (actual factors are arbitrary) :
2073
2074 TIME ELEVATION FACTOR
2075 R R R
2076 0.1 0 0.8
2077 0.2 20 0.85
2078 0.3 40 0.9
2079 0.4 60 0.85
2080 0.5 80 0.8
2081 0.6 90 0.75
2082
2083 method: Interpolation method when correcting from a table.
2084 Values are 'nearest', 'linear' (default), 'cubic'
2085 and 'spline'
2086
2087 insitu: if False a new scantable is returned.
2088 Otherwise, the scaling is done in-situ
2089 The default is taken from .asaprc (False)
2090
2091 """
2092
2093 if insitu is None: insitu = rcParams['insitu']
2094 self._math._setinsitu(insitu)
2095 varlist = vars()
2096 poly = poly or ()
2097 from os.path import expandvars
2098 filename = expandvars(filename)
2099 s = scantable(self._math._gainel(self, poly, filename, method))
2100 s._add_history("gain_el", varlist)
2101 if insitu:
2102 self._assign(s)
2103 else:
2104 return s
2105
2106 @asaplog_post_dec
2107 def freq_align(self, reftime=None, method='cubic', insitu=None):
2108 """\
2109 Return a scan where all rows have been aligned in frequency/velocity.
2110 The alignment frequency frame (e.g. LSRK) is that set by function
2111 set_freqframe.
2112
2113 Parameters:
2114
2115 reftime: reference time to align at. By default, the time of
2116 the first row of data is used.
2117
2118 method: Interpolation method for regridding the spectra.
2119 Choose from 'nearest', 'linear', 'cubic' (default)
2120 and 'spline'
2121
2122 insitu: if False a new scantable is returned.
2123 Otherwise, the scaling is done in-situ
2124 The default is taken from .asaprc (False)
2125
2126 """
2127 if insitu is None: insitu = rcParams["insitu"]
2128 oldInsitu = self._math._insitu()
2129 self._math._setinsitu(insitu)
2130 varlist = vars()
2131 reftime = reftime or ""
2132 s = scantable(self._math._freq_align(self, reftime, method))
2133 s._add_history("freq_align", varlist)
2134 self._math._setinsitu(oldInsitu)
2135 if insitu:
2136 self._assign(s)
2137 else:
2138 return s
2139
2140 @asaplog_post_dec
2141 def opacity(self, tau=None, insitu=None):
2142 """\
2143 Apply an opacity correction. The data
2144 and Tsys are multiplied by the correction factor.
2145
2146 Parameters:
2147
2148 tau: (list of) opacity from which the correction factor is
2149 exp(tau*ZD)
2150 where ZD is the zenith-distance.
2151 If a list is provided, it has to be of length nIF,
2152 nIF*nPol or 1 and in order of IF/POL, e.g.
2153 [opif0pol0, opif0pol1, opif1pol0 ...]
2154 if tau is `None` the opacities are determined from a
2155 model.
2156
2157 insitu: if False a new scantable is returned.
2158 Otherwise, the scaling is done in-situ
2159 The default is taken from .asaprc (False)
2160
2161 """
2162 if insitu is None:
2163 insitu = rcParams['insitu']
2164 self._math._setinsitu(insitu)
2165 varlist = vars()
2166 if not hasattr(tau, "__len__"):
2167 tau = [tau]
2168 s = scantable(self._math._opacity(self, tau))
2169 s._add_history("opacity", varlist)
2170 if insitu:
2171 self._assign(s)
2172 else:
2173 return s
2174
2175 @asaplog_post_dec
2176 def bin(self, width=5, insitu=None):
2177 """\
2178 Return a scan where all spectra have been binned up.
2179
2180 Parameters:
2181
2182 width: The bin width (default=5) in pixels
2183
2184 insitu: if False a new scantable is returned.
2185 Otherwise, the scaling is done in-situ
2186 The default is taken from .asaprc (False)
2187
2188 """
2189 if insitu is None:
2190 insitu = rcParams['insitu']
2191 self._math._setinsitu(insitu)
2192 varlist = vars()
2193 s = scantable(self._math._bin(self, width))
2194 s._add_history("bin", varlist)
2195 if insitu:
2196 self._assign(s)
2197 else:
2198 return s
2199
2200 @asaplog_post_dec
2201 def reshape(self, first, last, insitu=None):
2202 """Resize the band by providing first and last channel.
2203 This will cut off all channels outside [first, last].
2204 """
2205 if insitu is None:
2206 insitu = rcParams['insitu']
2207 varlist = vars()
2208 if last < 0:
2209 last = self.nchan()-1 + last
2210 s = None
2211 if insitu:
2212 s = self
2213 else:
2214 s = self.copy()
2215 s._reshape(first,last)
2216 s._add_history("reshape", varlist)
2217 if not insitu:
2218 return s
2219
2220 @asaplog_post_dec
2221 def resample(self, width=5, method='cubic', insitu=None):
2222 """\
2223 Return a scan where all spectra have been binned up.
2224
2225 Parameters:
2226
2227 width: The bin width (default=5) in pixels
2228
2229 method: Interpolation method when correcting from a table.
2230 Values are 'nearest', 'linear', 'cubic' (default)
2231 and 'spline'
2232
2233 insitu: if False a new scantable is returned.
2234 Otherwise, the scaling is done in-situ
2235 The default is taken from .asaprc (False)
2236
2237 """
2238 if insitu is None:
2239 insitu = rcParams['insitu']
2240 self._math._setinsitu(insitu)
2241 varlist = vars()
2242 s = scantable(self._math._resample(self, method, width))
2243 s._add_history("resample", varlist)
2244 if insitu:
2245 self._assign(s)
2246 else:
2247 return s
2248
2249 @asaplog_post_dec
2250 def average_pol(self, mask=None, weight='none'):
2251 """\
2252 Average the Polarisations together.
2253
2254 Parameters:
2255
2256 mask: An optional mask defining the region, where the
2257 averaging will be applied. The output will have all
2258 specified points masked.
2259
2260 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2261 weighted), or 'tsys' (1/Tsys**2 weighted)
2262
2263 """
2264 varlist = vars()
2265 mask = mask or ()
2266 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2267 s._add_history("average_pol", varlist)
2268 return s
2269
2270 @asaplog_post_dec
2271 def average_beam(self, mask=None, weight='none'):
2272 """\
2273 Average the Beams together.
2274
2275 Parameters:
2276 mask: An optional mask defining the region, where the
2277 averaging will be applied. The output will have all
2278 specified points masked.
2279
2280 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2281 weighted), or 'tsys' (1/Tsys**2 weighted)
2282
2283 """
2284 varlist = vars()
2285 mask = mask or ()
2286 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2287 s._add_history("average_beam", varlist)
2288 return s
2289
2290 def parallactify(self, pflag):
2291 """\
2292 Set a flag to indicate whether this data should be treated as having
2293 been 'parallactified' (total phase == 0.0)
2294
2295 Parameters:
2296
2297 pflag: Bool indicating whether to turn this on (True) or
2298 off (False)
2299
2300 """
2301 varlist = vars()
2302 self._parallactify(pflag)
2303 self._add_history("parallactify", varlist)
2304
2305 @asaplog_post_dec
2306 def convert_pol(self, poltype=None):
2307 """\
2308 Convert the data to a different polarisation type.
2309 Note that you will need cross-polarisation terms for most conversions.
2310
2311 Parameters:
2312
2313 poltype: The new polarisation type. Valid types are:
2314 'linear', 'circular', 'stokes' and 'linpol'
2315
2316 """
2317 varlist = vars()
2318 s = scantable(self._math._convertpol(self, poltype))
2319 s._add_history("convert_pol", varlist)
2320 return s
2321
2322 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive approval state handed to _get_verify_action();
        # "N" makes the loop auto-reject (and skip plotting) all remaining
        # rows.  NOTE(review): the meaning of 'H'/None is defined by
        # _get_verify_action elsewhere in this file -- presumably it selects
        # which answer choices are offered.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # per-row abcissa/spectrum for the smoothed and original data
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                        )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                # ask the user to accept/reject this row's smoothing
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row the state hint changes from 'H' to None
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2414
2415 @asaplog_post_dec
2416 def regrid_channel(self, width=5, plot=False, insitu=None):
2417 """\
2418 Regrid the spectra by the specified channel width
2419
2420 Parameters:
2421
2422 width: The channel width (float) of regridded spectra
2423 in the current spectral unit.
2424
2425 plot: [NOT IMPLEMENTED YET]
2426 plot the original and the regridded spectra.
2427 In this each indivual fit has to be approved, by
2428 typing 'y' or 'n'
2429
2430 insitu: if False a new scantable is returned.
2431 Otherwise, the scaling is done in-situ
2432 The default is taken from .asaprc (False)
2433
2434 """
2435 if insitu is None: insitu = rcParams['insitu']
2436 varlist = vars()
2437
2438 if plot:
2439 asaplog.post()
2440 asaplog.push("Verification plot is not implemtnetd yet.")
2441 asaplog.post("WARN")
2442
2443 s = self.copy()
2444 s._regrid_specchan(width)
2445
2446 s._add_history("regrid_channel", varlist)
2447
2448# if plot:
2449# from asap.asapplotter import new_asaplot
2450# theplot = new_asaplot(rcParams['plotter.gui'])
2451# from matplotlib import rc as rcp
2452# rcp('lines', linewidth=1)
2453# theplot.set_panels()
2454# ylab=s._get_ordinate_label()
2455# #theplot.palette(0,["#777777","red"])
2456# for r in xrange(s.nrow()):
2457# xsm=s._getabcissa(r)
2458# ysm=s._getspectrum(r)
2459# xorg=orgscan._getabcissa(r)
2460# yorg=orgscan._getspectrum(r)
2461# theplot.clear()
2462# theplot.hold()
2463# theplot.set_axes('ylabel',ylab)
2464# theplot.set_axes('xlabel',s._getabcissalabel(r))
2465# theplot.set_axes('title',s._getsourcename(r))
2466# theplot.set_line(label='Original',color="#777777")
2467# theplot.plot(xorg,yorg)
2468# theplot.set_line(label='Smoothed',color="red")
2469# theplot.plot(xsm,ysm)
2470# ### Ugly part for legend
2471# for i in [0,1]:
2472# theplot.subplots[0]['lines'].append(
2473# [theplot.subplots[0]['axes'].lines[i]]
2474# )
2475# theplot.release()
2476# ### Ugly part for legend
2477# theplot.subplots[0]['lines']=[]
2478# res = raw_input("Accept smoothing ([y]/n): ")
2479# if res.upper() == 'N':
2480# s._setspectrum(yorg, r)
2481# theplot.quit()
2482# del theplot
2483# del orgscan
2484
2485 if insitu: self._assign(s)
2486 else: return s
2487
2488 @asaplog_post_dec
2489 def _parse_wn(self, wn):
2490 if isinstance(wn, list) or isinstance(wn, tuple):
2491 return wn
2492 elif isinstance(wn, int):
2493 return [ wn ]
2494 elif isinstance(wn, str):
2495 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2496 val = wn.split('-')
2497 val = [int(val[0]), int(val[1])]
2498 val.sort()
2499 res = [i for i in xrange(val[0], val[1]+1)]
2500 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2501 val = int(wn[2:])+1
2502 res = [i for i in xrange(val)]
2503 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2504 val = int(wn[:-2])+1
2505 res = [i for i in xrange(val)]
2506 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2507 val = int(wn[1:])
2508 res = [i for i in xrange(val)]
2509 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2510 val = int(wn[:-1])
2511 res = [i for i in xrange(val)]
2512 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2513 # then interpreted in C++
2514 # side as [a,a+1,...,a_nyq]
2515 # (CAS-3759)
2516 val = int(wn[2:])
2517 res = [val, -999]
2518 #res = [i for i in xrange(val, self.nchan()/2+1)]
2519 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2520 # then interpreted in C++
2521 # side as [a,a+1,...,a_nyq]
2522 # (CAS-3759)
2523 val = int(wn[:-2])
2524 res = [val, -999]
2525 #res = [i for i in xrange(val, self.nchan()/2+1)]
2526 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2527 # then interpreted in C++
2528 # side as [a+1,a+2,...,a_nyq]
2529 # (CAS-3759)
2530 val = int(wn[1:])+1
2531 res = [val, -999]
2532 #res = [i for i in xrange(val, self.nchan()/2+1)]
2533 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2534 # then interpreted in C++
2535 # side as [a+1,a+2,...,a_nyq]
2536 # (CAS-3759)
2537 val = int(wn[:-1])+1
2538 res = [val, -999]
2539 #res = [i for i in xrange(val, self.nchan()/2+1)]
2540
2541 return res
2542 else:
2543 msg = 'wrong value given for addwn/rejwn'
2544 raise RuntimeError(msg)
2545
2546 @asaplog_post_dec
2547 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2548 whichrow=None, uselinefinder=None, edge=None,
2549 threshold=None, chan_avg_limit=None):
2550 """\
2551 Calculates and returns model selection criteria for a specified
2552 baseline model and a given spectrum data.
2553 Available values include Akaike Information Criterion (AIC), the
2554 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2555 Bayesian Information Criterion (BIC) and the Generalised Cross
2556 Validation (GCV).
2557
2558 Parameters:
2559 value: name of model selection criteria to calculate.
2560 available ones include 'aic', 'aicc', 'bic' and
2561 'gcv'. default is 'aicc'.
2562 blfunc: baseline function name. available ones include
2563 'chebyshev', 'cspline' and 'sinusoid'.
2564 default is 'chebyshev'.
2565 order: parameter for basline function. actually stands for
2566 order of polynomial (order) for 'chebyshev',
2567 number of spline pieces (npiece) for 'cspline' and
2568 maximum wave number for 'sinusoid', respectively.
2569 default is 5 (which is also the default order value
2570 for [auto_]chebyshev_baseline()).
2571 mask: an optional mask. default is [].
2572 whichrow: row number. default is 0 (the first row)
2573 uselinefinder: use sd.linefinder() to flag out line regions
2574 default is True.
2575 edge: an optional number of channel to drop at
2576 the edge of spectrum. If only one value is
2577 specified, the same number will be dropped
2578 from both sides of the spectrum. Default
2579 is to keep all channels. Nested tuples
2580 represent individual edge selection for
2581 different IFs (a number of spectral channels
2582 can be different)
2583 default is (0, 0).
2584 threshold: the threshold used by line finder. It is
2585 better to keep it large as only strong lines
2586 affect the baseline solution.
2587 default is 3.
2588 chan_avg_limit: a maximum number of consequtive spectral
2589 channels to average during the search of
2590 weak and broad lines. The default is no
2591 averaging (and no search for weak lines).
2592 If such lines can affect the fitted baseline
2593 (e.g. a high order polynomial is fitted),
2594 increase this parameter (usually values up
2595 to 8 are reasonable). Most users of this
2596 method should find the default value sufficient.
2597 default is 1.
2598
2599 Example:
2600 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2601 """
2602
2603 try:
2604 varlist = vars()
2605
2606 if value is None: value = 'aicc'
2607 if blfunc is None: blfunc = 'chebyshev'
2608 if order is None: order = 5
2609 if mask is None: mask = []
2610 if whichrow is None: whichrow = 0
2611 if uselinefinder is None: uselinefinder = True
2612 if edge is None: edge = (0, 0)
2613 if threshold is None: threshold = 3
2614 if chan_avg_limit is None: chan_avg_limit = 1
2615
2616 return self._calc_aic(value, blfunc, order, mask,
2617 whichrow, uselinefinder, edge,
2618 threshold, chan_avg_limit)
2619
2620 except RuntimeError, e:
2621 raise_fitting_failure_exception(e)
2622
2623 @asaplog_post_dec
2624 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2625 fftmethod=None, fftthresh=None,
2626 addwn=None, rejwn=None, clipthresh=None,
2627 clipniter=None, plot=None,
2628 getresidual=None, showprogress=None,
2629 minnrow=None, outlog=None, blfile=None, csvformat=None):
2630 """\
2631 Return a scan which has been baselined (all rows) with sinusoidal
2632 functions.
2633
2634 Parameters:
2635 insitu: if False a new scantable is returned.
2636 Otherwise, the scaling is done in-situ
2637 The default is taken from .asaprc (False)
2638 mask: an optional mask
2639 applyfft: if True use some method, such as FFT, to find
2640 strongest sinusoidal components in the wavenumber
2641 domain to be used for baseline fitting.
2642 default is True.
2643 fftmethod: method to find the strong sinusoidal components.
2644 now only 'fft' is available and it is the default.
2645 fftthresh: the threshold to select wave numbers to be used for
2646 fitting from the distribution of amplitudes in the
2647 wavenumber domain.
2648 both float and string values accepted.
2649 given a float value, the unit is set to sigma.
2650 for string values, allowed formats include:
2651 'xsigma' or 'x' (= x-sigma level. e.g.,
2652 '3sigma'), or
2653 'topx' (= the x strongest ones, e.g. 'top5').
2654 default is 3.0 (unit: sigma).
2655 addwn: the additional wave numbers to be used for fitting.
2656 list or integer value is accepted to specify every
2657 wave numbers. also string value can be used in case
2658 you need to specify wave numbers in a certain range,
2659 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2660 '<a' (= 0,1,...,a-2,a-1),
2661 '>=a' (= a, a+1, ... up to the maximum wave
2662 number corresponding to the Nyquist
2663 frequency for the case of FFT).
2664 default is [0].
2665 rejwn: the wave numbers NOT to be used for fitting.
2666 can be set just as addwn but has higher priority:
2667 wave numbers which are specified both in addwn
2668 and rejwn will NOT be used. default is [].
2669 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2670 clipniter: maximum number of iteration of 'clipthresh'-sigma
2671 clipping (default is 0)
2672 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2673 plot the fit and the residual. In this each
2674 indivual fit has to be approved, by typing 'y'
2675 or 'n'
2676 getresidual: if False, returns best-fit values instead of
2677 residual. (default is True)
2678 showprogress: show progress status for large data.
2679 default is True.
2680 minnrow: minimum number of input spectra to show.
2681 default is 1000.
2682 outlog: Output the coefficients of the best-fit
2683 function to logger (default is False)
2684 blfile: Name of a text file in which the best-fit
2685 parameter values to be written
2686 (default is '': no file/logger output)
2687 csvformat: if True blfile is csv-formatted, default is False.
2688
2689 Example:
2690 # return a scan baselined by a combination of sinusoidal curves
2691 # having wave numbers in spectral window up to 10,
2692 # also with 3-sigma clipping, iteration up to 4 times
2693 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2694
2695 Note:
2696 The best-fit parameter values output in logger and/or blfile are now
2697 based on specunit of 'channel'.
2698 """
2699
2700 try:
2701 varlist = vars()
2702
2703 if insitu is None: insitu = rcParams['insitu']
2704 if insitu:
2705 workscan = self
2706 else:
2707 workscan = self.copy()
2708
2709 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2710 if mask is None: mask = []
2711 if applyfft is None: applyfft = True
2712 if fftmethod is None: fftmethod = 'fft'
2713 if fftthresh is None: fftthresh = 3.0
2714 if addwn is None: addwn = [0]
2715 if rejwn is None: rejwn = []
2716 if clipthresh is None: clipthresh = 3.0
2717 if clipniter is None: clipniter = 0
2718 if plot is None: plot = False
2719 if getresidual is None: getresidual = True
2720 if showprogress is None: showprogress = True
2721 if minnrow is None: minnrow = 1000
2722 if outlog is None: outlog = False
2723 if blfile is None: blfile = ''
2724 if csvformat is None: csvformat = False
2725
2726 if csvformat:
2727 scsvformat = "T"
2728 else:
2729 scsvformat = "F"
2730
2731 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2732 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2733 str(fftthresh).lower(),
2734 workscan._parse_wn(addwn),
2735 workscan._parse_wn(rejwn),
2736 clipthresh, clipniter,
2737 getresidual,
2738 pack_progress_params(showprogress,
2739 minnrow),
2740 outlog, scsvformat+blfile)
2741 workscan._add_history('sinusoid_baseline', varlist)
2742
2743 if insitu:
2744 self._assign(workscan)
2745 else:
2746 return workscan
2747
2748 except RuntimeError, e:
2749 raise_fitting_failure_exception(e)
2750
2751
2752 @asaplog_post_dec
2753 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2754 fftmethod=None, fftthresh=None,
2755 addwn=None, rejwn=None, clipthresh=None,
2756 clipniter=None, edge=None, threshold=None,
2757 chan_avg_limit=None, plot=None,
2758 getresidual=None, showprogress=None,
2759 minnrow=None, outlog=None, blfile=None, csvformat=None):
2760 """\
2761 Return a scan which has been baselined (all rows) with sinusoidal
2762 functions.
2763 Spectral lines are detected first using linefinder and masked out
2764 to avoid them affecting the baseline solution.
2765
2766 Parameters:
2767 insitu: if False a new scantable is returned.
2768 Otherwise, the scaling is done in-situ
2769 The default is taken from .asaprc (False)
2770 mask: an optional mask retreived from scantable
2771 applyfft: if True use some method, such as FFT, to find
2772 strongest sinusoidal components in the wavenumber
2773 domain to be used for baseline fitting.
2774 default is True.
2775 fftmethod: method to find the strong sinusoidal components.
2776 now only 'fft' is available and it is the default.
2777 fftthresh: the threshold to select wave numbers to be used for
2778 fitting from the distribution of amplitudes in the
2779 wavenumber domain.
2780 both float and string values accepted.
2781 given a float value, the unit is set to sigma.
2782 for string values, allowed formats include:
2783 'xsigma' or 'x' (= x-sigma level. e.g.,
2784 '3sigma'), or
2785 'topx' (= the x strongest ones, e.g. 'top5').
2786 default is 3.0 (unit: sigma).
2787 addwn: the additional wave numbers to be used for fitting.
2788 list or integer value is accepted to specify every
2789 wave numbers. also string value can be used in case
2790 you need to specify wave numbers in a certain range,
2791 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2792 '<a' (= 0,1,...,a-2,a-1),
2793 '>=a' (= a, a+1, ... up to the maximum wave
2794 number corresponding to the Nyquist
2795 frequency for the case of FFT).
2796 default is [0].
2797 rejwn: the wave numbers NOT to be used for fitting.
2798 can be set just as addwn but has higher priority:
2799 wave numbers which are specified both in addwn
2800 and rejwn will NOT be used. default is [].
2801 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2802 clipniter: maximum number of iteration of 'clipthresh'-sigma
2803 clipping (default is 0)
2804 edge: an optional number of channel to drop at
2805 the edge of spectrum. If only one value is
2806 specified, the same number will be dropped
2807 from both sides of the spectrum. Default
2808 is to keep all channels. Nested tuples
2809 represent individual edge selection for
2810 different IFs (a number of spectral channels
2811 can be different)
2812 threshold: the threshold used by line finder. It is
2813 better to keep it large as only strong lines
2814 affect the baseline solution.
2815 chan_avg_limit: a maximum number of consequtive spectral
2816 channels to average during the search of
2817 weak and broad lines. The default is no
2818 averaging (and no search for weak lines).
2819 If such lines can affect the fitted baseline
2820 (e.g. a high order polynomial is fitted),
2821 increase this parameter (usually values up
2822 to 8 are reasonable). Most users of this
2823 method should find the default value sufficient.
2824 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2825 plot the fit and the residual. In this each
2826 indivual fit has to be approved, by typing 'y'
2827 or 'n'
2828 getresidual: if False, returns best-fit values instead of
2829 residual. (default is True)
2830 showprogress: show progress status for large data.
2831 default is True.
2832 minnrow: minimum number of input spectra to show.
2833 default is 1000.
2834 outlog: Output the coefficients of the best-fit
2835 function to logger (default is False)
2836 blfile: Name of a text file in which the best-fit
2837 parameter values to be written
2838 (default is "": no file/logger output)
2839 csvformat: if True blfile is csv-formatted, default is False.
2840
2841 Example:
2842 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2843
2844 Note:
2845 The best-fit parameter values output in logger and/or blfile are now
2846 based on specunit of 'channel'.
2847 """
2848
2849 try:
2850 varlist = vars()
2851
2852 if insitu is None: insitu = rcParams['insitu']
2853 if insitu:
2854 workscan = self
2855 else:
2856 workscan = self.copy()
2857
2858 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2859 if mask is None: mask = []
2860 if applyfft is None: applyfft = True
2861 if fftmethod is None: fftmethod = 'fft'
2862 if fftthresh is None: fftthresh = 3.0
2863 if addwn is None: addwn = [0]
2864 if rejwn is None: rejwn = []
2865 if clipthresh is None: clipthresh = 3.0
2866 if clipniter is None: clipniter = 0
2867 if edge is None: edge = (0,0)
2868 if threshold is None: threshold = 3
2869 if chan_avg_limit is None: chan_avg_limit = 1
2870 if plot is None: plot = False
2871 if getresidual is None: getresidual = True
2872 if showprogress is None: showprogress = True
2873 if minnrow is None: minnrow = 1000
2874 if outlog is None: outlog = False
2875 if blfile is None: blfile = ''
2876 if csvformat is None: csvformat = False
2877
2878 if csvformat:
2879 scsvformat = "T"
2880 else:
2881 scsvformat = "F"
2882
2883 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2884 workscan._auto_sinusoid_baseline(mask, applyfft,
2885 fftmethod.lower(),
2886 str(fftthresh).lower(),
2887 workscan._parse_wn(addwn),
2888 workscan._parse_wn(rejwn),
2889 clipthresh, clipniter,
2890 normalise_edge_param(edge),
2891 threshold, chan_avg_limit,
2892 getresidual,
2893 pack_progress_params(showprogress,
2894 minnrow),
2895 outlog, scsvformat+blfile)
2896 workscan._add_history("auto_sinusoid_baseline", varlist)
2897
2898 if insitu:
2899 self._assign(workscan)
2900 else:
2901 return workscan
2902
2903 except RuntimeError, e:
2904 raise_fitting_failure_exception(e)
2905
2906 @asaplog_post_dec
2907 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2908 clipthresh=None, clipniter=None, plot=None,
2909 getresidual=None, showprogress=None, minnrow=None,
2910 outlog=None, blfile=None, csvformat=None):
2911 """\
2912 Return a scan which has been baselined (all rows) by cubic spline
2913 function (piecewise cubic polynomial).
2914
2915 Parameters:
2916 insitu: If False a new scantable is returned.
2917 Otherwise, the scaling is done in-situ
2918 The default is taken from .asaprc (False)
2919 mask: An optional mask
2920 npiece: Number of pieces. (default is 2)
2921 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2922 clipniter: maximum number of iteration of 'clipthresh'-sigma
2923 clipping (default is 0)
2924 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2925 plot the fit and the residual. In this each
2926 indivual fit has to be approved, by typing 'y'
2927 or 'n'
2928 getresidual: if False, returns best-fit values instead of
2929 residual. (default is True)
2930 showprogress: show progress status for large data.
2931 default is True.
2932 minnrow: minimum number of input spectra to show.
2933 default is 1000.
2934 outlog: Output the coefficients of the best-fit
2935 function to logger (default is False)
2936 blfile: Name of a text file in which the best-fit
2937 parameter values to be written
2938 (default is "": no file/logger output)
2939 csvformat: if True blfile is csv-formatted, default is False.
2940
2941 Example:
2942 # return a scan baselined by a cubic spline consisting of 2 pieces
2943 # (i.e., 1 internal knot),
2944 # also with 3-sigma clipping, iteration up to 4 times
2945 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2946
2947 Note:
2948 The best-fit parameter values output in logger and/or blfile are now
2949 based on specunit of 'channel'.
2950 """
2951
2952 try:
2953 varlist = vars()
2954
2955 if insitu is None: insitu = rcParams['insitu']
2956 if insitu:
2957 workscan = self
2958 else:
2959 workscan = self.copy()
2960
2961 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2962 if mask is None: mask = []
2963 if npiece is None: npiece = 2
2964 if clipthresh is None: clipthresh = 3.0
2965 if clipniter is None: clipniter = 0
2966 if plot is None: plot = False
2967 if getresidual is None: getresidual = True
2968 if showprogress is None: showprogress = True
2969 if minnrow is None: minnrow = 1000
2970 if outlog is None: outlog = False
2971 if blfile is None: blfile = ''
2972 if csvformat is None: csvformat = False
2973
2974 if csvformat:
2975 scsvformat = "T"
2976 else:
2977 scsvformat = "F"
2978
2979 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2980 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2981 getresidual,
2982 pack_progress_params(showprogress,
2983 minnrow),
2984 outlog, scsvformat+blfile)
2985 workscan._add_history("cspline_baseline", varlist)
2986
2987 if insitu:
2988 self._assign(workscan)
2989 else:
2990 return workscan
2991
2992 except RuntimeError, e:
2993 raise_fitting_failure_exception(e)
2994
2995 @asaplog_post_dec
2996 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2997 clipthresh=None, clipniter=None,
2998 edge=None, threshold=None, chan_avg_limit=None,
2999 getresidual=None, plot=None,
3000 showprogress=None, minnrow=None, outlog=None,
3001 blfile=None, csvformat=None):
3002 """\
3003 Return a scan which has been baselined (all rows) by cubic spline
3004 function (piecewise cubic polynomial).
3005 Spectral lines are detected first using linefinder and masked out
3006 to avoid them affecting the baseline solution.
3007
3008 Parameters:
3009 insitu: if False a new scantable is returned.
3010 Otherwise, the scaling is done in-situ
3011 The default is taken from .asaprc (False)
3012 mask: an optional mask retreived from scantable
3013 npiece: Number of pieces. (default is 2)
3014 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3015 clipniter: maximum number of iteration of 'clipthresh'-sigma
3016 clipping (default is 0)
3017 edge: an optional number of channel to drop at
3018 the edge of spectrum. If only one value is
3019 specified, the same number will be dropped
3020 from both sides of the spectrum. Default
3021 is to keep all channels. Nested tuples
3022 represent individual edge selection for
3023 different IFs (a number of spectral channels
3024 can be different)
3025 threshold: the threshold used by line finder. It is
3026 better to keep it large as only strong lines
3027 affect the baseline solution.
3028 chan_avg_limit: a maximum number of consequtive spectral
3029 channels to average during the search of
3030 weak and broad lines. The default is no
3031 averaging (and no search for weak lines).
3032 If such lines can affect the fitted baseline
3033 (e.g. a high order polynomial is fitted),
3034 increase this parameter (usually values up
3035 to 8 are reasonable). Most users of this
3036 method should find the default value sufficient.
3037 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3038 plot the fit and the residual. In this each
3039 indivual fit has to be approved, by typing 'y'
3040 or 'n'
3041 getresidual: if False, returns best-fit values instead of
3042 residual. (default is True)
3043 showprogress: show progress status for large data.
3044 default is True.
3045 minnrow: minimum number of input spectra to show.
3046 default is 1000.
3047 outlog: Output the coefficients of the best-fit
3048 function to logger (default is False)
3049 blfile: Name of a text file in which the best-fit
3050 parameter values to be written
3051 (default is "": no file/logger output)
3052 csvformat: if True blfile is csv-formatted, default is False.
3053
3054 Example:
3055 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3056
3057 Note:
3058 The best-fit parameter values output in logger and/or blfile are now
3059 based on specunit of 'channel'.
3060 """
3061
3062 try:
3063 varlist = vars()
3064
3065 if insitu is None: insitu = rcParams['insitu']
3066 if insitu:
3067 workscan = self
3068 else:
3069 workscan = self.copy()
3070
3071 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3072 if mask is None: mask = []
3073 if npiece is None: npiece = 2
3074 if clipthresh is None: clipthresh = 3.0
3075 if clipniter is None: clipniter = 0
3076 if edge is None: edge = (0, 0)
3077 if threshold is None: threshold = 3
3078 if chan_avg_limit is None: chan_avg_limit = 1
3079 if plot is None: plot = False
3080 if getresidual is None: getresidual = True
3081 if showprogress is None: showprogress = True
3082 if minnrow is None: minnrow = 1000
3083 if outlog is None: outlog = False
3084 if blfile is None: blfile = ''
3085 if csvformat is None: csvformat = False
3086
3087 if csvformat:
3088 scsvformat = "T"
3089 else:
3090 scsvformat = "F"
3091
3092 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3093 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
3094 clipniter,
3095 normalise_edge_param(edge),
3096 threshold,
3097 chan_avg_limit, getresidual,
3098 pack_progress_params(showprogress,
3099 minnrow),
3100 outlog, scsvformat+blfile)
3101 workscan._add_history("auto_cspline_baseline", varlist)
3102
3103 if insitu:
3104 self._assign(workscan)
3105 else:
3106 return workscan
3107
3108 except RuntimeError, e:
3109 raise_fitting_failure_exception(e)
3110
3111 @asaplog_post_dec
3112 def chebyshev_baseline(self, insitu=None, mask=None, order=None,
3113 clipthresh=None, clipniter=None, plot=None,
3114 getresidual=None, showprogress=None, minnrow=None,
3115 outlog=None, blfile=None, csvformat=None):
3116 """\
3117 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3118
3119 Parameters:
3120 insitu: If False a new scantable is returned.
3121 Otherwise, the scaling is done in-situ
3122 The default is taken from .asaprc (False)
3123 mask: An optional mask
3124 order: the maximum order of Chebyshev polynomial (default is 5)
3125 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3126 clipniter: maximum number of iteration of 'clipthresh'-sigma
3127 clipping (default is 0)
3128 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3129 plot the fit and the residual. In this each
3130 indivual fit has to be approved, by typing 'y'
3131 or 'n'
3132 getresidual: if False, returns best-fit values instead of
3133 residual. (default is True)
3134 showprogress: show progress status for large data.
3135 default is True.
3136 minnrow: minimum number of input spectra to show.
3137 default is 1000.
3138 outlog: Output the coefficients of the best-fit
3139 function to logger (default is False)
3140 blfile: Name of a text file in which the best-fit
3141 parameter values to be written
3142 (default is "": no file/logger output)
3143 csvformat: if True blfile is csv-formatted, default is False.
3144
3145 Example:
3146 # return a scan baselined by a cubic spline consisting of 2 pieces
3147 # (i.e., 1 internal knot),
3148 # also with 3-sigma clipping, iteration up to 4 times
3149 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
3150
3151 Note:
3152 The best-fit parameter values output in logger and/or blfile are now
3153 based on specunit of 'channel'.
3154 """
3155
3156 try:
3157 varlist = vars()
3158
3159 if insitu is None: insitu = rcParams['insitu']
3160 if insitu:
3161 workscan = self
3162 else:
3163 workscan = self.copy()
3164
3165 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3166 if mask is None: mask = []
3167 if order is None: order = 5
3168 if clipthresh is None: clipthresh = 3.0
3169 if clipniter is None: clipniter = 0
3170 if plot is None: plot = False
3171 if getresidual is None: getresidual = True
3172 if showprogress is None: showprogress = True
3173 if minnrow is None: minnrow = 1000
3174 if outlog is None: outlog = False
3175 if blfile is None: blfile = ''
3176 if csvformat is None: csvformat = False
3177
3178 if csvformat:
3179 scsvformat = "T"
3180 else:
3181 scsvformat = "F"
3182
3183 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3184 workscan._chebyshev_baseline(mask, order, clipthresh, clipniter,
3185 getresidual,
3186 pack_progress_params(showprogress,
3187 minnrow),
3188 outlog, scsvformat+blfile)
3189 workscan._add_history("chebyshev_baseline", varlist)
3190
3191 if insitu:
3192 self._assign(workscan)
3193 else:
3194 return workscan
3195
3196 except RuntimeError, e:
3197 raise_fitting_failure_exception(e)
3198
3199 @asaplog_post_dec
3200 def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None,
3201 clipthresh=None, clipniter=None,
3202 edge=None, threshold=None, chan_avg_limit=None,
3203 getresidual=None, plot=None,
3204 showprogress=None, minnrow=None, outlog=None,
3205 blfile=None, csvformat=None):
3206 """\
3207 Return a scan which has been baselined (all rows) by Chebyshev polynomials.
3208 Spectral lines are detected first using linefinder and masked out
3209 to avoid them affecting the baseline solution.
3210
3211 Parameters:
3212 insitu: if False a new scantable is returned.
3213 Otherwise, the scaling is done in-situ
3214 The default is taken from .asaprc (False)
3215 mask: an optional mask retreived from scantable
3216 order: the maximum order of Chebyshev polynomial (default is 5)
3217 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3218 clipniter: maximum number of iteration of 'clipthresh'-sigma
3219 clipping (default is 0)
3220 edge: an optional number of channel to drop at
3221 the edge of spectrum. If only one value is
3222 specified, the same number will be dropped
3223 from both sides of the spectrum. Default
3224 is to keep all channels. Nested tuples
3225 represent individual edge selection for
3226 different IFs (a number of spectral channels
3227 can be different)
3228 threshold: the threshold used by line finder. It is
3229 better to keep it large as only strong lines
3230 affect the baseline solution.
3231 chan_avg_limit: a maximum number of consequtive spectral
3232 channels to average during the search of
3233 weak and broad lines. The default is no
3234 averaging (and no search for weak lines).
3235 If such lines can affect the fitted baseline
3236 (e.g. a high order polynomial is fitted),
3237 increase this parameter (usually values up
3238 to 8 are reasonable). Most users of this
3239 method should find the default value sufficient.
3240 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3241 plot the fit and the residual. In this each
3242 indivual fit has to be approved, by typing 'y'
3243 or 'n'
3244 getresidual: if False, returns best-fit values instead of
3245 residual. (default is True)
3246 showprogress: show progress status for large data.
3247 default is True.
3248 minnrow: minimum number of input spectra to show.
3249 default is 1000.
3250 outlog: Output the coefficients of the best-fit
3251 function to logger (default is False)
3252 blfile: Name of a text file in which the best-fit
3253 parameter values to be written
3254 (default is "": no file/logger output)
3255 csvformat: if True blfile is csv-formatted, default is False.
3256
3257 Example:
3258 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3259
3260 Note:
3261 The best-fit parameter values output in logger and/or blfile are now
3262 based on specunit of 'channel'.
3263 """
3264
3265 try:
3266 varlist = vars()
3267
3268 if insitu is None: insitu = rcParams['insitu']
3269 if insitu:
3270 workscan = self
3271 else:
3272 workscan = self.copy()
3273
3274 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3275 if mask is None: mask = []
3276 if order is None: order = 5
3277 if clipthresh is None: clipthresh = 3.0
3278 if clipniter is None: clipniter = 0
3279 if edge is None: edge = (0, 0)
3280 if threshold is None: threshold = 3
3281 if chan_avg_limit is None: chan_avg_limit = 1
3282 if plot is None: plot = False
3283 if getresidual is None: getresidual = True
3284 if showprogress is None: showprogress = True
3285 if minnrow is None: minnrow = 1000
3286 if outlog is None: outlog = False
3287 if blfile is None: blfile = ''
3288 if csvformat is None: csvformat = False
3289
3290 if csvformat:
3291 scsvformat = "T"
3292 else:
3293 scsvformat = "F"
3294
3295 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3296 workscan._auto_chebyshev_baseline(mask, order, clipthresh,
3297 clipniter,
3298 normalise_edge_param(edge),
3299 threshold,
3300 chan_avg_limit, getresidual,
3301 pack_progress_params(showprogress,
3302 minnrow),
3303 outlog, scsvformat+blfile)
3304 workscan._add_history("auto_chebyshev_baseline", varlist)
3305
3306 if insitu:
3307 self._assign(workscan)
3308 else:
3309 return workscan
3310
3311 except RuntimeError, e:
3312 raise_fitting_failure_exception(e)
3313
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          indivual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for every unspecified option
            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""
            if csvformat is None: csvformat = False

            # the C++ layer expects the csv flag packed as "T"/"F" in
            # front of the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            if plot:
                # interactive mode: fit row-by-row in Python so each fit
                # can be inspected and accepted/rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' tracks the user's verification choice: "H"
                # forces the prompt on the first row, "Y" accepts all
                # remaining rows without plotting
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    # store either the residual or the fitted baseline,
                    # depending on 'getresidual'
                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive mode: delegate the whole fit to the
                # C++ layer
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3451
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retreived from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            indivual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            # fill in the documented defaults for every unspecified option
            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the C++ layer expects the csv flag packed as "T"/"F" in
            # front of the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # expand 'edge' into a flat list of per-IF (left, right) pairs
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: run linefinder and fit row-by-row in
                # Python so each fit can be accepted/rejected by the user
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # 'action' tracks the user's verification choice: "H"
                # forces the prompt on the first row, "Y" accepts all
                # remaining rows without plotting
                action = "H"
                for r in rows:
                    # edge pairs are stored flat, two entries per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only channels the linefinder left unmasked
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    # store either the residual or the fitted baseline,
                    # depending on 'getresidual'
                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive mode: delegate everything (including the
                # line finding) to the C++ layer
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3621
3622 def _init_blinfo(self):
3623 """\
3624 Initialise the following three auxiliary members:
3625 blpars : parameters of the best-fit baseline,
3626 masklists : mask data (edge positions of masked channels) and
3627 actualmask : mask data (in boolean list),
3628 to keep for use later (including output to logger/text files).
3629 Used by poly_baseline() and auto_poly_baseline() in case of
3630 'plot=True'.
3631 """
3632 self.blpars = []
3633 self.masklists = []
3634 self.actualmask = []
3635 return
3636
3637 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3638 """\
3639 Append baseline-fitting related info to blpars, masklist and
3640 actualmask.
3641 """
3642 self.blpars.append(data_blpars)
3643 self.masklists.append(data_masklists)
3644 self.actualmask.append(data_actualmask)
3645 return
3646
3647 @asaplog_post_dec
3648 def rotate_linpolphase(self, angle):
3649 """\
3650 Rotate the phase of the complex polarization O=Q+iU correlation.
3651 This is always done in situ in the raw data. So if you call this
3652 function more than once then each call rotates the phase further.
3653
3654 Parameters:
3655
3656 angle: The angle (degrees) to rotate (add) by.
3657
3658 Example::
3659
3660 scan.rotate_linpolphase(2.3)
3661
3662 """
3663 varlist = vars()
3664 self._math._rotate_linpolphase(self, angle)
3665 self._add_history("rotate_linpolphase", varlist)
3666 return
3667
3668 @asaplog_post_dec
3669 def rotate_xyphase(self, angle):
3670 """\
3671 Rotate the phase of the XY correlation. This is always done in situ
3672 in the data. So if you call this function more than once
3673 then each call rotates the phase further.
3674
3675 Parameters:
3676
3677 angle: The angle (degrees) to rotate (add) by.
3678
3679 Example::
3680
3681 scan.rotate_xyphase(2.3)
3682
3683 """
3684 varlist = vars()
3685 self._math._rotate_xyphase(self, angle)
3686 self._add_history("rotate_xyphase", varlist)
3687 return
3688
3689 @asaplog_post_dec
3690 def swap_linears(self):
3691 """\
3692 Swap the linear polarisations XX and YY, or better the first two
3693 polarisations as this also works for ciculars.
3694 """
3695 varlist = vars()
3696 self._math._swap_linears(self)
3697 self._add_history("swap_linears", varlist)
3698 return
3699
3700 @asaplog_post_dec
3701 def invert_phase(self):
3702 """\
3703 Invert the phase of the complex polarisation
3704 """
3705 varlist = vars()
3706 self._math._invert_phase(self)
3707 self._add_history("invert_phase", varlist)
3708 return
3709
3710 @asaplog_post_dec
3711 def add(self, offset, insitu=None):
3712 """\
3713 Return a scan where all spectra have the offset added
3714
3715 Parameters:
3716
3717 offset: the offset
3718
3719 insitu: if False a new scantable is returned.
3720 Otherwise, the scaling is done in-situ
3721 The default is taken from .asaprc (False)
3722
3723 """
3724 if insitu is None: insitu = rcParams['insitu']
3725 self._math._setinsitu(insitu)
3726 varlist = vars()
3727 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3728 s._add_history("add", varlist)
3729 if insitu:
3730 self._assign(s)
3731 else:
3732 return s
3733
3734 @asaplog_post_dec
3735 def scale(self, factor, tsys=True, insitu=None):
3736 """\
3737
3738 Return a scan where all spectra are scaled by the given 'factor'
3739
3740 Parameters:
3741
3742 factor: the scaling factor (float or 1D float list)
3743
3744 insitu: if False a new scantable is returned.
3745 Otherwise, the scaling is done in-situ
3746 The default is taken from .asaprc (False)
3747
3748 tsys: if True (default) then apply the operation to Tsys
3749 as well as the data
3750
3751 """
3752 if insitu is None: insitu = rcParams['insitu']
3753 self._math._setinsitu(insitu)
3754 varlist = vars()
3755 s = None
3756 import numpy
3757 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3758 if isinstance(factor[0], list) or isinstance(factor[0],
3759 numpy.ndarray):
3760 from asapmath import _array2dOp
3761 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3762 else:
3763 s = scantable( self._math._arrayop( self, factor,
3764 "MUL", tsys ) )
3765 else:
3766 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3767 s._add_history("scale", varlist)
3768 if insitu:
3769 self._assign(s)
3770 else:
3771 return s
3772
3773 @preserve_selection
3774 def set_sourcetype(self, match, matchtype="pattern",
3775 sourcetype="reference"):
3776 """\
3777 Set the type of the source to be an source or reference scan
3778 using the provided pattern.
3779
3780 Parameters:
3781
3782 match: a Unix style pattern, regular expression or selector
3783
3784 matchtype: 'pattern' (default) UNIX style pattern or
3785 'regex' regular expression
3786
3787 sourcetype: the type of the source to use (source/reference)
3788
3789 """
3790 varlist = vars()
3791 stype = -1
3792 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
3793 stype = 1
3794 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
3795 stype = 0
3796 else:
3797 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
3798 if matchtype.lower().startswith("p"):
3799 matchtype = "pattern"
3800 elif matchtype.lower().startswith("r"):
3801 matchtype = "regex"
3802 else:
3803 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3804 sel = selector()
3805 if isinstance(match, selector):
3806 sel = match
3807 else:
3808 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3809 self.set_selection(sel)
3810 self._setsourcetype(stype)
3811 self._add_history("set_sourcetype", varlist)
3812
3813 @asaplog_post_dec
3814 @preserve_selection
3815 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3816 """\
3817 This function allows to build quotients automatically.
3818 It assumes the observation to have the same number of
3819 "ons" and "offs"
3820
3821 Parameters:
3822
3823 preserve: you can preserve (default) the continuum or
3824 remove it. The equations used are
3825
3826 preserve: Output = Toff * (on/off) - Toff
3827
3828 remove: Output = Toff * (on/off) - Ton
3829
3830 mode: the on/off detection mode
3831 'paired' (default)
3832 identifies 'off' scans by the
3833 trailing '_R' (Mopra/Parkes) or
3834 '_e'/'_w' (Tid) and matches
3835 on/off pairs from the observing pattern
3836 'time'
3837 finds the closest off in time
3838
3839 .. todo:: verify argument is not implemented
3840
3841 """
3842 varlist = vars()
3843 modes = ["time", "paired"]
3844 if not mode in modes:
3845 msg = "please provide valid mode. Valid modes are %s" % (modes)
3846 raise ValueError(msg)
3847 s = None
3848 if mode.lower() == "paired":
3849 sel = self.get_selection()
3850 sel.set_query("SRCTYPE==psoff")
3851 self.set_selection(sel)
3852 offs = self.copy()
3853 sel.set_query("SRCTYPE==pson")
3854 self.set_selection(sel)
3855 ons = self.copy()
3856 s = scantable(self._math._quotient(ons, offs, preserve))
3857 elif mode.lower() == "time":
3858 s = scantable(self._math._auto_quotient(self, mode, preserve))
3859 s._add_history("auto_quotient", varlist)
3860 return s
3861
3862 @asaplog_post_dec
3863 def mx_quotient(self, mask = None, weight='median', preserve=True):
3864 """\
3865 Form a quotient using "off" beams when observing in "MX" mode.
3866
3867 Parameters:
3868
3869 mask: an optional mask to be used when weight == 'stddev'
3870
3871 weight: How to average the off beams. Default is 'median'.
3872
3873 preserve: you can preserve (default) the continuum or
3874 remove it. The equations used are:
3875
3876 preserve: Output = Toff * (on/off) - Toff
3877
3878 remove: Output = Toff * (on/off) - Ton
3879
3880 """
3881 mask = mask or ()
3882 varlist = vars()
3883 on = scantable(self._math._mx_extract(self, 'on'))
3884 preoff = scantable(self._math._mx_extract(self, 'off'))
3885 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3886 from asapmath import quotient
3887 q = quotient(on, off, preserve)
3888 q._add_history("mx_quotient", varlist)
3889 return q
3890
3891 @asaplog_post_dec
3892 def freq_switch(self, insitu=None):
3893 """\
3894 Apply frequency switching to the data.
3895
3896 Parameters:
3897
3898 insitu: if False a new scantable is returned.
3899 Otherwise, the swictching is done in-situ
3900 The default is taken from .asaprc (False)
3901
3902 """
3903 if insitu is None: insitu = rcParams['insitu']
3904 self._math._setinsitu(insitu)
3905 varlist = vars()
3906 s = scantable(self._math._freqswitch(self))
3907 s._add_history("freq_switch", varlist)
3908 if insitu:
3909 self._assign(s)
3910 else:
3911 return s
3912
3913 @asaplog_post_dec
3914 def recalc_azel(self):
3915 """Recalculate the azimuth and elevation for each position."""
3916 varlist = vars()
3917 self._recalcazel()
3918 self._add_history("recalc_azel", varlist)
3919 return
3920
3921 @asaplog_post_dec
3922 def __add__(self, other):
3923 """
3924 implicit on all axes and on Tsys
3925 """
3926 varlist = vars()
3927 s = self.__op( other, "ADD" )
3928 s._add_history("operator +", varlist)
3929 return s
3930
3931 @asaplog_post_dec
3932 def __sub__(self, other):
3933 """
3934 implicit on all axes and on Tsys
3935 """
3936 varlist = vars()
3937 s = self.__op( other, "SUB" )
3938 s._add_history("operator -", varlist)
3939 return s
3940
3941 @asaplog_post_dec
3942 def __mul__(self, other):
3943 """
3944 implicit on all axes and on Tsys
3945 """
3946 varlist = vars()
3947 s = self.__op( other, "MUL" ) ;
3948 s._add_history("operator *", varlist)
3949 return s
3950
3951
3952 @asaplog_post_dec
3953 def __div__(self, other):
3954 """
3955 implicit on all axes and on Tsys
3956 """
3957 varlist = vars()
3958 s = self.__op( other, "DIV" )
3959 s._add_history("operator /", varlist)
3960 return s
3961
3962 @asaplog_post_dec
3963 def __op( self, other, mode ):
3964 s = None
3965 if isinstance(other, scantable):
3966 s = scantable(self._math._binaryop(self, other, mode))
3967 elif isinstance(other, float):
3968 if other == 0.0:
3969 raise ZeroDivisionError("Dividing by zero is not recommended")
3970 s = scantable(self._math._unaryop(self, other, mode, False))
3971 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3972 if isinstance(other[0], list) \
3973 or isinstance(other[0], numpy.ndarray):
3974 from asapmath import _array2dOp
3975 s = _array2dOp( self, other, mode, False )
3976 else:
3977 s = scantable( self._math._arrayop( self, other,
3978 mode, False ) )
3979 else:
3980 raise TypeError("Other input is not a scantable or float value")
3981 return s
3982
3983 @asaplog_post_dec
3984 def get_fit(self, row=0):
3985 """\
3986 Print or return the stored fits for a row in the scantable
3987
3988 Parameters:
3989
3990 row: the row which the fit has been applied to.
3991
3992 """
3993 if row > self.nrow():
3994 return
3995 from asap.asapfit import asapfit
3996 fit = asapfit(self._getfit(row))
3997 asaplog.push( '%s' %(fit) )
3998 return fit.as_dict()
3999
4000 @preserve_selection
4001 def flag_nans(self):
4002 """\
4003 Utility function to flag NaN values in the scantable.
4004 """
4005 import numpy
4006 basesel = self.get_selection()
4007 for i in range(self.nrow()):
4008 sel = self.get_row_selector(i)
4009 self.set_selection(basesel+sel)
4010 nans = numpy.isnan(self._getspectrum(0))
4011 if numpy.any(nans):
4012 bnans = [ bool(v) for v in nans]
4013 self.flag(bnans)
4014
    def get_row_selector(self, rowno):
        """Return a selector that matches only row number 'rowno'."""
        return selector(rows=[rowno])
4017
4018 def _add_history(self, funcname, parameters):
4019 if not rcParams['scantable.history']:
4020 return
4021 # create date
4022 sep = "##"
4023 from datetime import datetime
4024 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
4025 hist = dstr+sep
4026 hist += funcname+sep#cdate+sep
4027 if parameters.has_key('self'):
4028 del parameters['self']
4029 for k, v in parameters.iteritems():
4030 if type(v) is dict:
4031 for k2, v2 in v.iteritems():
4032 hist += k2
4033 hist += "="
4034 if isinstance(v2, scantable):
4035 hist += 'scantable'
4036 elif k2 == 'mask':
4037 if isinstance(v2, list) or isinstance(v2, tuple):
4038 hist += str(self._zip_mask(v2))
4039 else:
4040 hist += str(v2)
4041 else:
4042 hist += str(v2)
4043 else:
4044 hist += k
4045 hist += "="
4046 if isinstance(v, scantable):
4047 hist += 'scantable'
4048 elif k == 'mask':
4049 if isinstance(v, list) or isinstance(v, tuple):
4050 hist += str(self._zip_mask(v))
4051 else:
4052 hist += str(v)
4053 else:
4054 hist += str(v)
4055 hist += sep
4056 hist = hist[:-2] # remove trailing '##'
4057 self._addhistory(hist)
4058
4059
4060 def _zip_mask(self, mask):
4061 mask = list(mask)
4062 i = 0
4063 segments = []
4064 while mask[i:].count(1):
4065 i += mask[i:].index(1)
4066 if mask[i:].count(0):
4067 j = i + mask[i:].index(0)
4068 else:
4069 j = len(mask)
4070 segments.append([i, j])
4071 i = j
4072 return segments
4073
4074 def _get_ordinate_label(self):
4075 fu = "("+self.get_fluxunit()+")"
4076 import re
4077 lbl = "Intensity"
4078 if re.match(".K.", fu):
4079 lbl = "Brightness Temperature "+ fu
4080 elif re.match(".Jy.", fu):
4081 lbl = "Flux density "+ fu
4082 return lbl
4083
4084 def _check_ifs(self):
4085# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4086 nchans = [self.nchan(i) for i in self.getifnos()]
4087 nchans = filter(lambda t: t > 0, nchans)
4088 return (sum(nchans)/len(nchans) == nchans[0])
4089
4090 @asaplog_post_dec
4091 def _fill(self, names, unit, average, opts={}):
4092 first = True
4093 fullnames = []
4094 for name in names:
4095 name = os.path.expandvars(name)
4096 name = os.path.expanduser(name)
4097 if not os.path.exists(name):
4098 msg = "File '%s' does not exists" % (name)
4099 raise IOError(msg)
4100 fullnames.append(name)
4101 if average:
4102 asaplog.push('Auto averaging integrations')
4103 stype = int(rcParams['scantable.storage'].lower() == 'disk')
4104 for name in fullnames:
4105 tbl = Scantable(stype)
4106 if is_ms( name ):
4107 r = msfiller( tbl )
4108 else:
4109 r = filler( tbl )
4110 msg = "Importing %s..." % (name)
4111 asaplog.push(msg, False)
4112 r.open(name, opts)
4113 rx = rcParams['scantable.reference']
4114 r.setreferenceexpr(rx)
4115 r.fill()
4116 if average:
4117 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
4118 if not first:
4119 tbl = self._math._merge([self, tbl])
4120 Scantable.__init__(self, tbl)
4121 r.close()
4122 del r, tbl
4123 first = False
4124 #flush log
4125 asaplog.post()
4126 if unit is not None:
4127 self.set_fluxunit(unit)
4128 if not is_casapy():
4129 self.set_freqframe(rcParams['scantable.freqframe'])
4130
    def _get_verify_action( self, msg, action=None ):
        """Resolve a verification action to one of 'Y', 'N', 'A' or 'R'.

        If *action* is missing (or not a string) the user is prompted
        interactively; an empty reply and any unrecognized input both
        default to 'Y'.  'H' or '?' prints a help text and re-prompts
        (note: the recursion drops the original *action* argument, so a
        fresh prompt is always issued after help).

        Parameters:

            msg:    prompt text shown to the user
            action: pre-selected action string, or None to prompt

        """
        valid_act = ['Y', 'N', 'A', 'R']
        # prompt only when no usable action string was supplied
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter means accept (the capitalized default)
            return "Y"
        elif (action.upper()[0] in valid_act):
            # only the first character matters, case-insensitively
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            return self._get_verify_action(msg)
        else:
            # anything unrecognized silently falls back to 'Y'
            return 'Y'
4149
4150 def __getitem__(self, key):
4151 if key < 0:
4152 key += self.nrow()
4153 if key >= self.nrow():
4154 raise IndexError("Row index out of range.")
4155 return self._getspectrum(key)
4156
4157 def __setitem__(self, key, value):
4158 if key < 0:
4159 key += self.nrow()
4160 if key >= self.nrow():
4161 raise IndexError("Row index out of range.")
4162 if not hasattr(value, "__len__") or \
4163 len(value) > self.nchan(self.getif(key)):
4164 raise ValueError("Spectrum length doesn't match.")
4165 return self._setspectrum(value, key)
4166
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
4169
4170 def __iter__(self):
4171 for i in range(len(self)):
4172 yield self[i]
# Note: See TracBrowser for help on using the repository browser.