source: trunk/python/scantable.py@ 2750

Last change on this file since 2750 was 2713, checked in by WataruKawasaki, 12 years ago

New Development: Yes

JIRA Issue: Yes CAS-3618

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sd

Description: new method for scantable to calculate AIC, AICc, BIC and GCV.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 162.0 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
def preserve_selection(func):
    """Decorator: snapshot the scantable's selection before calling
    *func* and restore it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def _wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always restore, whether func succeeded or raised
            obj.set_selection(saved)
        return result
    return _wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for a casacore table directory whose table.info names
    'Scantable', or names neither 'Measurement Set' nor 'Image' (very
    old scantables carry no explicit type string).
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        # only the first line carries the table type
        l = f.readline()
    finally:
        # close even if readline raises (the original leaked the handle)
        f.close()
    if l.find('Scantable') != -1:
        return True
    return ( l.find('Measurement Set') == -1 and
             l.find('Image') == -1 )
56## return (os.path.isdir(filename)
57## and not os.path.exists(filename+'/table.f1')
58## and os.path.exists(filename+'/table.info'))
59
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for a casacore table directory whose table.info first
    line mentions 'Measurement Set'.
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    f = open(filename+'/table.info')
    try:
        # only the first line carries the table type
        l = f.readline()
    finally:
        # close even if readline raises (the original leaked the handle)
        f.close()
    return l.find('Measurement Set') != -1
80
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # --- validation -------------------------------------------------------
    if isinstance(edge, (list, tuple)):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                # NOTE: raise via call form (works on both Python 2 and 3;
                # the old comma form is Python-2-only) and keep the message
                # a single clean line (the old backslash-continued literal
                # embedded a long run of indentation spaces in the message)
                raise ValueError("Each element of the 'edge' tuple has "
                                 "to be a pair of integers or an integer.")
            if isinstance(edgepar, (list, tuple)):
                if len(edgepar) != 2:
                    raise ValueError("Each element of the 'edge' tuple has "
                                     "to be a pair of integers or an "
                                     "integer.")
    else:
        if not _is_valid(edge, int):
            raise ValueError("Parameter 'edge' has to be an integer or a "
                             "pair of integers specified as a tuple. "
                             "Nested tuples are allowed "
                             "to make individual selection for different IFs.")

    # --- canonicalise trivial shapes -------------------------------------
    if isinstance(edge, int):
        edge = [ edge, edge ]                # e.g. 3 => [3,3]
    elif isinstance(edge, (list, tuple)):
        if len(edge) == 0:
            edge = [0, 0]                    # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]  # e.g. [1] => [1,1]

    # 'commonedge' means a single [left,right] pair shared by all IFs
    commonedge = True
    if len(edge) > 2:
        commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, (list, tuple)):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: flatten, expanding bare ints to [n,n]
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
165
def raise_fitting_failure_exception(e):
    """Report a fitting failure.

    In verbose mode the error and an explanatory note are pushed to the
    asap log; otherwise a RuntimeError carrying both texts is raised.
    """
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e)+'\n'+msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
173
def pack_progress_params(showprogress, minnrow):
    """Encode *showprogress* (bool) and *minnrow* into the single
    'lowercase-bool,count' string expected by the progress machinery."""
    return "%s,%s" % (str(showprogress).lower(), str(minnrow))
176
177class scantable(Scantable):
178 """\
179 The ASAP container for scans (single-dish data).
180 """
181
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # fall back to the rc-file default when parallactify is None/False
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the call arguments for the history record below
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # "copy constructor": make a reference to an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # existing scantable on disk: open directly
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the ms-specific
                    # keyword args ('getpt', 'antenna') to the filler
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other regular file (rpfits/sdfits): generic filler
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names: fill them all into one table
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
258
259 @asaplog_post_dec
260 def save(self, name=None, format=None, overwrite=False):
261 """\
262 Store the scantable on disk. This can be an asap (aips++) Table,
263 SDFITS or MS2 format.
264
265 Parameters:
266
267 name: the name of the outputfile. For format 'ASCII'
268 this is the root file name (data in 'name'.txt
269 and header in 'name'_header.txt)
270
271 format: an optional file format. Default is ASAP.
272 Allowed are:
273
274 * 'ASAP' (save as ASAP [aips++] Table),
275 * 'SDFITS' (save as SDFITS file)
276 * 'ASCII' (saves as ascii text file)
277 * 'MS2' (saves as an casacore MeasurementSet V2)
278 * 'FITS' (save as image FITS - not readable by
279 class)
280 * 'CLASS' (save as FITS readable by CLASS)
281
282 overwrite: If the file should be overwritten if it exists.
283 The default False is to return with warning
284 without writing the output. USE WITH CARE.
285
286 Example::
287
288 scan.save('myscan.asap')
289 scan.save('myscan.sdfits', 'SDFITS')
290
291 """
292 from os import path
293 format = format or rcParams['scantable.save']
294 suffix = '.'+format.lower()
295 if name is None or name == "":
296 name = 'scantable'+suffix
297 msg = "No filename given. Using default name %s..." % name
298 asaplog.push(msg)
299 name = path.expandvars(name)
300 if path.isfile(name) or path.isdir(name):
301 if not overwrite:
302 msg = "File %s exists." % name
303 raise IOError(msg)
304 format2 = format.upper()
305 if format2 == 'ASAP':
306 self._save(name)
307 elif format2 == 'MS2':
308 msopt = {'ms': {'overwrite': overwrite } }
309 from asap._asap import mswriter
310 writer = mswriter( self )
311 writer.write( name, msopt )
312 else:
313 from asap._asap import stwriter as stw
314 writer = stw(format2)
315 writer.write(self, name)
316 return
317
318 def copy(self):
319 """Return a copy of this scantable.
320
321 *Note*:
322
323 This makes a full (deep) copy. scan2 = scan1 makes a reference.
324
325 Example::
326
327 copiedscan = scan.copy()
328
329 """
330 sd = scantable(Scantable._copy(self))
331 return sd
332
333 def drop_scan(self, scanid=None):
334 """\
335 Return a new scantable where the specified scan number(s) has(have)
336 been dropped.
337
338 Parameters:
339
340 scanid: a (list of) scan number(s)
341
342 """
343 from asap import _is_sequence_or_number as _is_valid
344 from asap import _to_list
345 from asap import unique
346 if not _is_valid(scanid):
347 raise RuntimeError( 'Please specify a scanno to drop from the'
348 ' scantable' )
349 scanid = _to_list(scanid)
350 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
351 for sid in scanid: allscans.remove(sid)
352 if len(allscans) == 0:
353 raise ValueError("Can't remove all scans")
354 sel = selector(scans=allscans)
355 return self._select_copy(sel)
356
357 def _select_copy(self, selection):
358 orig = self.get_selection()
359 self.set_selection(orig+selection)
360 cp = self.copy()
361 self.set_selection(orig)
362 return cp
363
364 def get_scan(self, scanid=None):
365 """\
366 Return a specific scan (by scanno) or collection of scans (by
367 source name) in a new scantable.
368
369 *Note*:
370
371 See scantable.drop_scan() for the inverse operation.
372
373 Parameters:
374
375 scanid: a (list of) scanno or a source name, unix-style
376 patterns are accepted for source name matching, e.g.
377 '*_R' gets all 'ref scans
378
379 Example::
380
381 # get all scans containing the source '323p459'
382 newscan = scan.get_scan('323p459')
383 # get all 'off' scans
384 refscans = scan.get_scan('*_R')
385 # get a susbset of scans by scanno (as listed in scan.summary())
386 newscan = scan.get_scan([0, 2, 7, 10])
387
388 """
389 if scanid is None:
390 raise RuntimeError( 'Please specify a scan no or name to '
391 'retrieve from the scantable' )
392 try:
393 bsel = self.get_selection()
394 sel = selector()
395 if type(scanid) is str:
396 sel.set_name(scanid)
397 return self._select_copy(sel)
398 elif type(scanid) is int:
399 sel.set_scans([scanid])
400 return self._select_copy(sel)
401 elif type(scanid) is list:
402 sel.set_scans(scanid)
403 return self._select_copy(sel)
404 else:
405 msg = "Illegal scanid type, use 'int' or 'list' if ints."
406 raise TypeError(msg)
407 except RuntimeError:
408 raise
409
410 def __str__(self):
411 tempFile = tempfile.NamedTemporaryFile()
412 Scantable._summary(self, tempFile.name)
413 tempFile.seek(0)
414 asaplog.clear()
415 return tempFile.file.read()
416
417 @asaplog_post_dec
418 def summary(self, filename=None):
419 """\
420 Print a summary of the contents of this scantable.
421
422 Parameters:
423
424 filename: the name of a file to write the putput to
425 Default - no file output
426
427 """
428 if filename is not None:
429 if filename is "":
430 filename = 'scantable_summary.txt'
431 from os.path import expandvars, isdir
432 filename = expandvars(filename)
433 if isdir(filename):
434 msg = "Illegal file name '%s'." % (filename)
435 raise IOError(msg)
436 else:
437 filename = ""
438 Scantable._summary(self, filename)
439
440 def get_spectrum(self, rowno):
441 """Return the spectrum for the current row in the scantable as a list.
442
443 Parameters:
444
445 rowno: the row number to retrieve the spectrum from
446
447 """
448 return self._getspectrum(rowno)
449
450 def get_mask(self, rowno):
451 """Return the mask for the current row in the scantable as a list.
452
453 Parameters:
454
455 rowno: the row number to retrieve the mask from
456
457 """
458 return self._getmask(rowno)
459
460 def set_spectrum(self, spec, rowno):
461 """Set the spectrum for the current row in the scantable.
462
463 Parameters:
464
465 spec: the new spectrum
466
467 rowno: the row number to set the spectrum for
468
469 """
470 assert(len(spec) == self.nchan(self.getif(rowno)))
471 return self._setspectrum(spec, rowno)
472
473 def get_coordinate(self, rowno):
474 """Return the (spectral) coordinate for a a given 'rowno'.
475
476 *Note*:
477
478 * This coordinate is only valid until a scantable method modifies
479 the frequency axis.
480 * This coordinate does contain the original frequency set-up
481 NOT the new frame. The conversions however are done using the user
482 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
483 use scantable.freq_align first. Without it there is no closure,
484 i.e.::
485
486 c = myscan.get_coordinate(0)
487 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
488
489 Parameters:
490
491 rowno: the row number for the spectral coordinate
492
493 """
494 return coordinate(Scantable.get_coordinate(self, rowno))
495
496 def get_selection(self):
497 """\
498 Get the selection object currently set on this scantable.
499
500 Example::
501
502 sel = scan.get_selection()
503 sel.set_ifs(0) # select IF 0
504 scan.set_selection(sel) # apply modified selection
505
506 """
507 return selector(self._getselection())
508
509 def set_selection(self, selection=None, **kw):
510 """\
511 Select a subset of the data. All following operations on this scantable
512 are only applied to thi selection.
513
514 Parameters:
515
516 selection: a selector object (default unset the selection), or
517 any combination of 'pols', 'ifs', 'beams', 'scans',
518 'cycles', 'name', 'query'
519
520 Examples::
521
522 sel = selector() # create a selection object
523 self.set_scans([0, 3]) # select SCANNO 0 and 3
524 scan.set_selection(sel) # set the selection
525 scan.summary() # will only print summary of scanno 0 an 3
526 scan.set_selection() # unset the selection
527 # or the equivalent
528 scan.set_selection(scans=[0,3])
529 scan.summary() # will only print summary of scanno 0 an 3
530 scan.set_selection() # unset the selection
531
532 """
533 if selection is None:
534 # reset
535 if len(kw) == 0:
536 selection = selector()
537 else:
538 # try keywords
539 for k in kw:
540 if k not in selector.fields:
541 raise KeyError("Invalid selection key '%s', "
542 "valid keys are %s" % (k,
543 selector.fields))
544 selection = selector(**kw)
545 self._setselection(selection)
546
547 def get_row(self, row=0, insitu=None):
548 """\
549 Select a row in the scantable.
550 Return a scantable with single row.
551
552 Parameters:
553
554 row: row no of integration, default is 0.
555 insitu: if False a new scantable is returned. Otherwise, the
556 scaling is done in-situ. The default is taken from .asaprc
557 (False)
558
559 """
560 if insitu is None:
561 insitu = rcParams['insitu']
562 if not insitu:
563 workscan = self.copy()
564 else:
565 workscan = self
566 # Select a row
567 sel = selector()
568 sel.set_rows([row])
569 workscan.set_selection(sel)
570 if not workscan.nrow() == 1:
571 msg = "Could not identify single row. %d rows selected." \
572 % (workscan.nrow())
573 raise RuntimeError(msg)
574 if insitu:
575 self._assign(workscan)
576 else:
577 return workscan
578
579 @asaplog_post_dec
580 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
581 """\
582 Determine the specified statistic of the current beam/if/pol
583 Takes a 'mask' as an optional parameter to specify which
584 channels should be excluded.
585
586 Parameters:
587
588 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
589 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
590
591 mask: an optional mask specifying where the statistic
592 should be determined.
593
594 form: format string to print statistic values
595
596 row: row number of spectrum to process.
597 (default is None: for all rows)
598
599 Example:
600 scan.set_unit('channel')
601 msk = scan.create_mask([100, 200], [500, 600])
602 scan.stats(stat='mean', mask=m)
603
604 """
605 mask = mask or []
606 if not self._check_ifs():
607 raise ValueError("Cannot apply mask as the IFs have different "
608 "number of channels. Please use setselection() "
609 "to select individual IFs")
610 rtnabc = False
611 if stat.lower().endswith('_abc'): rtnabc = True
612 getchan = False
613 if stat.lower().startswith('min') or stat.lower().startswith('max'):
614 chan = self._math._minmaxchan(self, mask, stat)
615 getchan = True
616 statvals = []
617 if not rtnabc:
618 if row == None:
619 statvals = self._math._stats(self, mask, stat)
620 else:
621 statvals = self._math._statsrow(self, mask, stat, int(row))
622
623 #def cb(i):
624 # return statvals[i]
625
626 #return self._row_callback(cb, stat)
627
628 label=stat
629 #callback=cb
630 out = ""
631 #outvec = []
632 sep = '-'*50
633
634 if row == None:
635 rows = xrange(self.nrow())
636 elif isinstance(row, int):
637 rows = [ row ]
638
639 for i in rows:
640 refstr = ''
641 statunit= ''
642 if getchan:
643 qx, qy = self.chan2data(rowno=i, chan=chan[i])
644 if rtnabc:
645 statvals.append(qx['value'])
646 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
647 statunit= '['+qx['unit']+']'
648 else:
649 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
650
651 tm = self._gettime(i)
652 src = self._getsourcename(i)
653 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
654 out += 'Time[%s]:\n' % (tm)
655 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
656 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
657 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
658 #outvec.append(callback(i))
659 if len(rows) > 1:
660 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
661 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
662 else:
663 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
664 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
665 out += sep+"\n"
666
667 import os
668 if os.environ.has_key( 'USER' ):
669 usr = os.environ['USER']
670 else:
671 import commands
672 usr = commands.getoutput( 'whoami' )
673 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
674 f = open(tmpfile,'w')
675 print >> f, sep
676 print >> f, ' %s %s' % (label, statunit)
677 print >> f, sep
678 print >> f, out
679 f.close()
680 f = open(tmpfile,'r')
681 x = f.readlines()
682 f.close()
683 asaplog.push(''.join(x), False)
684
685 return statvals
686
687 def chan2data(self, rowno=0, chan=0):
688 """\
689 Returns channel/frequency/velocity and spectral value
690 at an arbitrary row and channel in the scantable.
691
692 Parameters:
693
694 rowno: a row number in the scantable. Default is the
695 first row, i.e. rowno=0
696
697 chan: a channel in the scantable. Default is the first
698 channel, i.e. pos=0
699
700 """
701 if isinstance(rowno, int) and isinstance(chan, int):
702 qx = {'unit': self.get_unit(),
703 'value': self._getabcissa(rowno)[chan]}
704 qy = {'unit': self.get_fluxunit(),
705 'value': self._getspectrum(rowno)[chan]}
706 return qx, qy
707
708 def stddev(self, mask=None):
709 """\
710 Determine the standard deviation of the current beam/if/pol
711 Takes a 'mask' as an optional parameter to specify which
712 channels should be excluded.
713
714 Parameters:
715
716 mask: an optional mask specifying where the standard
717 deviation should be determined.
718
719 Example::
720
721 scan.set_unit('channel')
722 msk = scan.create_mask([100, 200], [500, 600])
723 scan.stddev(mask=m)
724
725 """
726 return self.stats(stat='stddev', mask=mask);
727
728
729 def get_column_names(self):
730 """\
731 Return a list of column names, which can be used for selection.
732 """
733 return list(Scantable.get_column_names(self))
734
735 def get_tsys(self, row=-1):
736 """\
737 Return the System temperatures.
738
739 Parameters:
740
741 row: the rowno to get the information for. (default all rows)
742
743 Returns:
744
745 a list of Tsys values for the current selection
746
747 """
748 if row > -1:
749 return self._get_column(self._gettsys, row)
750 return self._row_callback(self._gettsys, "Tsys")
751
752 def get_tsysspectrum(self, row=-1):
753 """\
754 Return the channel dependent system temperatures.
755
756 Parameters:
757
758 row: the rowno to get the information for. (default all rows)
759
760 Returns:
761
762 a list of Tsys values for the current selection
763
764 """
765 return self._get_column( self._gettsysspectrum, row )
766
767 def get_weather(self, row=-1):
768 """\
769 Return the weather informations.
770
771 Parameters:
772
773 row: the rowno to get the information for. (default all rows)
774
775 Returns:
776
777 a dict or list of of dicts of values for the current selection
778
779 """
780
781 values = self._get_column(self._get_weather, row)
782 if row > -1:
783 return {'temperature': values[0],
784 'pressure': values[1], 'humidity' : values[2],
785 'windspeed' : values[3], 'windaz' : values[4]
786 }
787 else:
788 out = []
789 for r in values:
790
791 out.append({'temperature': r[0],
792 'pressure': r[1], 'humidity' : r[2],
793 'windspeed' : r[3], 'windaz' : r[4]
794 })
795 return out
796
797 def _row_callback(self, callback, label):
798 out = ""
799 outvec = []
800 sep = '-'*50
801 for i in range(self.nrow()):
802 tm = self._gettime(i)
803 src = self._getsourcename(i)
804 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
805 out += 'Time[%s]:\n' % (tm)
806 if self.nbeam(-1) > 1:
807 out += ' Beam[%d] ' % (self.getbeam(i))
808 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
809 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
810 outvec.append(callback(i))
811 out += '= %3.3f\n' % (outvec[i])
812 out += sep+'\n'
813
814 asaplog.push(sep)
815 asaplog.push(" %s" % (label))
816 asaplog.push(sep)
817 asaplog.push(out)
818 asaplog.post()
819 return outvec
820
821 def _get_column(self, callback, row=-1, *args):
822 """
823 """
824 if row == -1:
825 return [callback(i, *args) for i in range(self.nrow())]
826 else:
827 if 0 <= row < self.nrow():
828 return callback(row, *args)
829
830
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the fields (.%f, %S, %M) that low precision omits from
            # the returned time strings so strptime matches them
            # NOTE(review): relies on Python 2 integer division -- under
            # Python 3, (6-prec)/2 is a float; confirm before porting
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
876
877
878 def get_inttime(self, row=-1):
879 """\
880 Get a list of integration times for the observations.
881 Return a time in seconds for each integration in the scantable.
882
883 Parameters:
884
885 row: row no of integration. Default -1 return all rows.
886
887 """
888 return self._get_column(self._getinttime, row)
889
890
891 def get_sourcename(self, row=-1):
892 """\
893 Get a list source names for the observations.
894 Return a string for each integration in the scantable.
895 Parameters:
896
897 row: row no of integration. Default -1 return all rows.
898
899 """
900 return self._get_column(self._getsourcename, row)
901
902 def get_elevation(self, row=-1):
903 """\
904 Get a list of elevations for the observations.
905 Return a float for each integration in the scantable.
906
907 Parameters:
908
909 row: row no of integration. Default -1 return all rows.
910
911 """
912 return self._get_column(self._getelevation, row)
913
914 def get_azimuth(self, row=-1):
915 """\
916 Get a list of azimuths for the observations.
917 Return a float for each integration in the scantable.
918
919 Parameters:
920 row: row no of integration. Default -1 return all rows.
921
922 """
923 return self._get_column(self._getazimuth, row)
924
925 def get_parangle(self, row=-1):
926 """\
927 Get a list of parallactic angles for the observations.
928 Return a float for each integration in the scantable.
929
930 Parameters:
931
932 row: row no of integration. Default -1 return all rows.
933
934 """
935 return self._get_column(self._getparangle, row)
936
937 def get_direction(self, row=-1):
938 """
939 Get a list of Positions on the sky (direction) for the observations.
940 Return a string for each integration in the scantable.
941
942 Parameters:
943
944 row: row no of integration. Default -1 return all rows
945
946 """
947 return self._get_column(self._getdirection, row)
948
949 def get_directionval(self, row=-1):
950 """\
951 Get a list of Positions on the sky (direction) for the observations.
952 Return a float for each integration in the scantable.
953
954 Parameters:
955
956 row: row no of integration. Default -1 return all rows
957
958 """
959 return self._get_column(self._getdirectionvec, row)
960
961 @asaplog_post_dec
962 def set_unit(self, unit='channel'):
963 """\
964 Set the unit for all following operations on this scantable
965
966 Parameters:
967
968 unit: optional unit, default is 'channel'. Use one of '*Hz',
969 'km/s', 'channel' or equivalent ''
970
971 """
972 varlist = vars()
973 if unit in ['', 'pixel', 'channel']:
974 unit = ''
975 inf = list(self._getcoordinfo())
976 inf[0] = unit
977 self._setcoordinfo(inf)
978 self._add_history("set_unit", varlist)
979
980 @asaplog_post_dec
981 def set_instrument(self, instr):
982 """\
983 Set the instrument for subsequent processing.
984
985 Parameters:
986
987 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
988 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
989
990 """
991 self._setInstrument(instr)
992 self._add_history("set_instument", vars())
993
994 @asaplog_post_dec
995 def set_feedtype(self, feedtype):
996 """\
997 Overwrite the feed type, which might not be set correctly.
998
999 Parameters:
1000
1001 feedtype: 'linear' or 'circular'
1002
1003 """
1004 self._setfeedtype(feedtype)
1005 self._add_history("set_feedtype", vars())
1006
1007 @asaplog_post_dec
1008 def set_doppler(self, doppler='RADIO'):
1009 """\
1010 Set the doppler for all following operations on this scantable.
1011
1012 Parameters:
1013
1014 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1015
1016 """
1017 varlist = vars()
1018 inf = list(self._getcoordinfo())
1019 inf[2] = doppler
1020 self._setcoordinfo(inf)
1021 self._add_history("set_doppler", vars())
1022
1023 @asaplog_post_dec
1024 def set_freqframe(self, frame=None):
1025 """\
1026 Set the frame type of the Spectral Axis.
1027
1028 Parameters:
1029
1030 frame: an optional frame type, default 'LSRK'. Valid frames are:
1031 'TOPO', 'LSRD', 'LSRK', 'BARY',
1032 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1033
1034 Example::
1035
1036 scan.set_freqframe('BARY')
1037
1038 """
1039 frame = frame or rcParams['scantable.freqframe']
1040 varlist = vars()
1041 # "REST" is not implemented in casacore
1042 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1043 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1044 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1045 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1046
1047 if frame in valid:
1048 inf = list(self._getcoordinfo())
1049 inf[1] = frame
1050 self._setcoordinfo(inf)
1051 self._add_history("set_freqframe", varlist)
1052 else:
1053 msg = "Please specify a valid freq type. Valid types are:\n", valid
1054 raise TypeError(msg)
1055
1056 @asaplog_post_dec
1057 def set_dirframe(self, frame=""):
1058 """\
1059 Set the frame type of the Direction on the sky.
1060
1061 Parameters:
1062
1063 frame: an optional frame type, default ''. Valid frames are:
1064 'J2000', 'B1950', 'GALACTIC'
1065
1066 Example:
1067
1068 scan.set_dirframe('GALACTIC')
1069
1070 """
1071 varlist = vars()
1072 Scantable.set_dirframe(self, frame)
1073 self._add_history("set_dirframe", varlist)
1074
1075 def get_unit(self):
1076 """\
1077 Get the default unit set in this scantable
1078
1079 Returns:
1080
1081 A unit string
1082
1083 """
1084 inf = self._getcoordinfo()
1085 unit = inf[0]
1086 if unit == '': unit = 'channel'
1087 return unit
1088
1089 @asaplog_post_dec
1090 def get_abcissa(self, rowno=0):
1091 """\
1092 Get the abcissa in the current coordinate setup for the currently
1093 selected Beam/IF/Pol
1094
1095 Parameters:
1096
1097 rowno: an optional row number in the scantable. Default is the
1098 first row, i.e. rowno=0
1099
1100 Returns:
1101
1102 The abcissa values and the format string (as a dictionary)
1103
1104 """
1105 abc = self._getabcissa(rowno)
1106 lbl = self._getabcissalabel(rowno)
1107 return abc, lbl
1108
1109 @asaplog_post_dec
1110 def flag(self, mask=None, unflag=False, row=-1):
1111 """\
1112 Flag the selected data using an optional channel mask.
1113
1114 Parameters:
1115
1116 mask: an optional channel mask, created with create_mask. Default
1117 (no mask) is all channels.
1118
1119 unflag: if True, unflag the data
1120
1121 row: an optional row number in the scantable.
1122 Default -1 flags all rows
1123
1124 """
1125 varlist = vars()
1126 mask = mask or []
1127 self._flag(row, mask, unflag)
1128 self._add_history("flag", varlist)
1129
1130 @asaplog_post_dec
1131 def flag_row(self, rows=None, unflag=False):
1132 """\
1133 Flag the selected data in row-based manner.
1134
1135 Parameters:
1136
1137 rows: list of row numbers to be flagged. Default is no row
1138 (must be explicitly specified to execute row-based
1139 flagging).
1140
1141 unflag: if True, unflag the data.
1142
1143 """
1144 varlist = vars()
1145 if rows is None:
1146 rows = []
1147 self._flag_row(rows, unflag)
1148 self._add_history("flag_row", varlist)
1149
1150 @asaplog_post_dec
1151 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1152 """\
1153 Flag the selected data outside a specified range (in channel-base)
1154
1155 Parameters:
1156
1157 uthres: upper threshold.
1158
1159 dthres: lower threshold
1160
1161 clipoutside: True for flagging data outside the range
1162 [dthres:uthres].
1163 False for flagging data inside the range.
1164
1165 unflag: if True, unflag the data.
1166
1167 """
1168 varlist = vars()
1169 self._clip(uthres, dthres, clipoutside, unflag)
1170 self._add_history("clip", varlist)
1171
1172 @asaplog_post_dec
1173 def lag_flag(self, start, end, unit="MHz", insitu=None):
1174 """\
1175 Flag the data in 'lag' space by providing a frequency to remove.
1176 Flagged data in the scantable get interpolated over the region.
1177 No taper is applied.
1178
1179 Parameters:
1180
1181 start: the start frequency (really a period within the
1182 bandwidth) or period to remove
1183
1184 end: the end frequency or period to remove
1185
1186 unit: the frequency unit (default 'MHz') or '' for
1187 explicit lag channels
1188
1189 *Notes*:
1190
1191 It is recommended to flag edges of the band or strong
1192 signals beforehand.
1193
1194 """
1195 if insitu is None: insitu = rcParams['insitu']
1196 self._math._setinsitu(insitu)
1197 varlist = vars()
1198 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1199 if not (unit == "" or base.has_key(unit)):
1200 raise ValueError("%s is not a valid unit." % unit)
1201 if unit == "":
1202 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1203 else:
1204 s = scantable(self._math._lag_flag(self, start*base[unit],
1205 end*base[unit], "frequency"))
1206 s._add_history("lag_flag", varlist)
1207 if insitu:
1208 self._assign(s)
1209 else:
1210 return s
1211
1212 @asaplog_post_dec
1213 def fft(self, rowno=None, mask=None, getrealimag=False):
1214 """\
1215 Apply FFT to the spectra.
1216 Flagged data in the scantable get interpolated over the region.
1217
1218 Parameters:
1219
1220 rowno: The row number(s) to be processed. int, list
1221 and tuple are accepted. By default (None), FFT
1222 is applied to the whole data.
1223
1224 mask: Auxiliary channel mask(s). Given as a boolean
1225 list, it is applied to all specified rows.
1226 A list of boolean lists can also be used to
1227 apply different masks. In the latter case, the
1228 length of 'mask' must be the same as that of
1229 'rowno'. The default is None.
1230
1231 getrealimag: If True, returns the real and imaginary part
1232 values of the complex results.
1233 If False (the default), returns the amplitude
1234 (absolute value) normalised with Ndata/2 and
1235 phase (argument, in unit of radian).
1236
1237 Returns:
1238
1239 A list of dictionaries containing the results for each spectrum.
1240 Each dictionary contains two values, the real and the imaginary
1241 parts when getrealimag = True, or the amplitude(absolute value)
1242 and the phase(argument) when getrealimag = False. The key for
1243 these values are 'real' and 'imag', or 'ampl' and 'phase',
1244 respectively.
1245 """
1246 if rowno is None:
1247 rowno = []
1248 if isinstance(rowno, int):
1249 rowno = [rowno]
1250 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1251 raise TypeError("The row number(s) must be int, list or tuple.")
1252 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1253
1254 usecommonmask = True
1255
1256 if mask is None:
1257 mask = []
1258 if isinstance(mask, list) or isinstance(mask, tuple):
1259 if len(mask) == 0:
1260 mask = [[]]
1261 else:
1262 if isinstance(mask[0], bool):
1263 if len(mask) != self.nchan(self.getif(rowno[0])):
1264 raise ValueError("The spectra and the mask have "
1265 "different length.")
1266 mask = [mask]
1267 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1268 usecommonmask = False
1269 if len(mask) != len(rowno):
1270 raise ValueError("When specifying masks for each "
1271 "spectrum, the numbers of them "
1272 "must be identical.")
1273 for i in xrange(mask):
1274 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1275 raise ValueError("The spectra and the mask have "
1276 "different length.")
1277 else:
1278 raise TypeError("The mask must be a boolean list or "
1279 "a list of boolean list.")
1280 else:
1281 raise TypeError("The mask must be a boolean list or a list of "
1282 "boolean list.")
1283
1284 res = []
1285
1286 imask = 0
1287 for whichrow in rowno:
1288 fspec = self._fft(whichrow, mask[imask], getrealimag)
1289 nspec = len(fspec)
1290
1291 i = 0
1292 v1 = []
1293 v2 = []
1294 reselem = {"real":[],"imag":[]} if getrealimag \
1295 else {"ampl":[],"phase":[]}
1296
1297 while (i < nspec):
1298 v1.append(fspec[i])
1299 v2.append(fspec[i+1])
1300 i += 2
1301
1302 if getrealimag:
1303 reselem["real"] += v1
1304 reselem["imag"] += v2
1305 else:
1306 reselem["ampl"] += v1
1307 reselem["phase"] += v2
1308
1309 res.append(reselem)
1310
1311 if not usecommonmask:
1312 imask += 1
1313
1314 return res
1315
    @asaplog_post_dec
    def create_mask(self, *args, **kwargs):
        """\
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.

        Parameters:

            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked

            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED

            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.

        Examples::

            scan.set_unit('channel')
            # a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            # b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'

            # c)
            #mask only channel 400
            msk = scan.create_mask([400])

        """
        # row used for the channel <-> current-unit conversion
        row = kwargs.get("row", 0)
        # abcissa values of that row, in the currently set unit
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if u == "":
            u = "channel"
        msg = "The current mask window unit is %s" % u
        i = self._check_ifs()
        if not i:
            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
        asaplog.push(msg)
        n = len(data)
        # start from an all-False mask and switch selected channels on
        msk = _n_bools(n, False)
        # accept both calling styles:
        #   create_mask([a, b], [c, d])   -> args is a tuple of windows
        #   create_mask([[a, b], [c, d]]) -> args[0] is the list of windows
        # decided by peeking at the innermost element type - UGLY!!!

        ws = (isinstance(args[-1][-1], int)
              or isinstance(args[-1][-1], float)) and args or args[0]
        for window in ws:
            # a single value means a one-channel window [v, v]
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as "
                                 "[start(, end)]")
            # normalise to window[0] <= window[1]
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            # include every channel whose abcissa value lies in the window
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
1388
1389 def get_masklist(self, mask=None, row=0, silent=False):
1390 """\
1391 Compute and return a list of mask windows, [min, max].
1392
1393 Parameters:
1394
1395 mask: channel mask, created with create_mask.
1396
1397 row: calcutate the masklist using the specified row
1398 for unit conversions, default is row=0
1399 only necessary if frequency varies over rows.
1400
1401 Returns:
1402
1403 [min, max], [min2, max2], ...
1404 Pairs of start/end points (inclusive)specifying
1405 the masked regions
1406
1407 """
1408 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1409 raise TypeError("The mask should be list or tuple.")
1410 if len(mask) <= 0:
1411 raise TypeError("The mask elements should be > 0")
1412 data = self._getabcissa(row)
1413 if len(data) != len(mask):
1414 msg = "Number of channels in scantable != number of mask elements"
1415 raise TypeError(msg)
1416 u = self._getcoordinfo()[0]
1417 if u == "":
1418 u = "channel"
1419 msg = "The current mask window unit is %s" % u
1420 i = self._check_ifs()
1421 if not i:
1422 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1423 if not silent:
1424 asaplog.push(msg)
1425 masklist = []
1426 ist, ien = None, None
1427 ist, ien=self.get_mask_indices(mask)
1428 if ist is not None and ien is not None:
1429 for i in xrange(len(ist)):
1430 range=[data[ist[i]],data[ien[i]]]
1431 range.sort()
1432 masklist.append([range[0],range[1]])
1433 return masklist
1434
1435 def get_mask_indices(self, mask=None):
1436 """\
1437 Compute and Return lists of mask start indices and mask end indices.
1438
1439 Parameters:
1440
1441 mask: channel mask, created with create_mask.
1442
1443 Returns:
1444
1445 List of mask start indices and that of mask end indices,
1446 i.e., [istart1,istart2,....], [iend1,iend2,....].
1447
1448 """
1449 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1450 raise TypeError("The mask should be list or tuple.")
1451 if len(mask) <= 0:
1452 raise TypeError("The mask elements should be > 0")
1453 istart = []
1454 iend = []
1455 if mask[0]:
1456 istart.append(0)
1457 for i in range(len(mask)-1):
1458 if not mask[i] and mask[i+1]:
1459 istart.append(i+1)
1460 elif mask[i] and not mask[i+1]:
1461 iend.append(i)
1462 if mask[len(mask)-1]:
1463 iend.append(len(mask)-1)
1464 if len(istart) != len(iend):
1465 raise RuntimeError("Numbers of mask start != mask end.")
1466 for i in range(len(istart)):
1467 if istart[i] > iend[i]:
1468 raise RuntimeError("Mask start index > mask end index")
1469 break
1470 return istart,iend
1471
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            # NOTE(review): execution continues past this point; this relies
            # on asaplog.post("ERROR") aborting the call - confirm
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels)
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find any row belonging to this IF to read its
                        # frequency axis from
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            # half a channel width in frequency units
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            # half a channel width in velocity units
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # freq is None when the whole spectrum is selected
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += "  IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1605
1606 @asaplog_post_dec
1607 def parse_idx_selection(self, mode, selexpr):
1608 """
1609 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1610 BEAMNO, and row number
1611
1612 Parameters:
1613 mode : which column to select.
1614 ['scan',|'if'|'pol'|'beam'|'row']
1615 selexpr : A comma separated selection expression.
1616 examples:
1617 '' = all (returns [])
1618 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1619 (returns [0,1,4,5,6,9])
1620 Returns:
1621 A List of selected indices
1622 """
1623 if selexpr == "":
1624 return []
1625 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1626 'b': 'beam', 'r': 'row'}
1627 smode = mode.lower()[0]
1628 if not (smode in valid_modes.keys()):
1629 msg = "Invalid mode '%s'. Valid modes are %s" %\
1630 (mode, str(valid_modes.values()))
1631 asaplog.post()
1632 asaplog.push(msg)
1633 asaplog.post("ERROR")
1634 mode = valid_modes[smode]
1635 minidx = None
1636 maxidx = None
1637 if smode == 'r':
1638 minidx = 0
1639 maxidx = self.nrow()
1640 else:
1641 idx = getattr(self,"get"+mode+"nos")()
1642 minidx = min(idx)
1643 maxidx = max(idx)
1644 del idx
1645 sellist = selexpr.split(',')
1646 idxlist = []
1647 for currselstr in sellist:
1648 # single range (may include ~, < or >)
1649 currlist = self._parse_selection(currselstr, typestr='integer',
1650 minval=minidx,maxval=maxidx)
1651 for thelist in currlist:
1652 idxlist += range(thelist[0],thelist[1]+1)
1653 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1654 asaplog.push(msg)
1655 return idxlist
1656
1657 def _parse_selection(self, selstr, typestr='float', offset=0.,
1658 minval=None, maxval=None):
1659 """
1660 Parameters:
1661 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1662 typestr : The type of the values in returned list
1663 ('integer' or 'float')
1664 offset : The offset value to subtract from or add to
1665 the boundary value if the selection string
1666 includes '<' or '>' [Valid only for typestr='float']
1667 minval, maxval : The minimum/maximum values to set if the
1668 selection string includes '<' or '>'.
1669 The list element is filled with None by default.
1670 Returns:
1671 A list of min/max pair of selections.
1672 Example:
1673 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1674 --> returns [[0,2],[5,7],[9,9]]
1675 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1676 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
1677 """
1678 selgroups = selstr.split(';')
1679 sellists = []
1680 if typestr.lower().startswith('int'):
1681 formatfunc = int
1682 offset = 1
1683 else:
1684 formatfunc = float
1685
1686 for currsel in selgroups:
1687 if currsel.find('~') > 0:
1688 # val0 <= x <= val1
1689 minsel = formatfunc(currsel.split('~')[0].strip())
1690 maxsel = formatfunc(currsel.split('~')[1].strip())
1691 elif currsel.strip().find('<=') > -1:
1692 bound = currsel.split('<=')
1693 try: # try "x <= val"
1694 minsel = minval
1695 maxsel = formatfunc(bound[1].strip())
1696 except ValueError: # now "val <= x"
1697 minsel = formatfunc(bound[0].strip())
1698 maxsel = maxval
1699 elif currsel.strip().find('>=') > -1:
1700 bound = currsel.split('>=')
1701 try: # try "x >= val"
1702 minsel = formatfunc(bound[1].strip())
1703 maxsel = maxval
1704 except ValueError: # now "val >= x"
1705 minsel = minval
1706 maxsel = formatfunc(bound[0].strip())
1707 elif currsel.strip().find('<') > -1:
1708 bound = currsel.split('<')
1709 try: # try "x < val"
1710 minsel = minval
1711 maxsel = formatfunc(bound[1].strip()) \
1712 - formatfunc(offset)
1713 except ValueError: # now "val < x"
1714 minsel = formatfunc(bound[0].strip()) \
1715 + formatfunc(offset)
1716 maxsel = maxval
1717 elif currsel.strip().find('>') > -1:
1718 bound = currsel.split('>')
1719 try: # try "x > val"
1720 minsel = formatfunc(bound[1].strip()) \
1721 + formatfunc(offset)
1722 maxsel = maxval
1723 except ValueError: # now "val > x"
1724 minsel = minval
1725 maxsel = formatfunc(bound[0].strip()) \
1726 - formatfunc(offset)
1727 else:
1728 minsel = formatfunc(currsel)
1729 maxsel = formatfunc(currsel)
1730 sellists.append([minsel,maxsel])
1731 return sellists
1732
1733# def get_restfreqs(self):
1734# """
1735# Get the restfrequency(s) stored in this scantable.
1736# The return value(s) are always of unit 'Hz'
1737# Parameters:
1738# none
1739# Returns:
1740# a list of doubles
1741# """
1742# return list(self._getrestfreqs())
1743
1744 def get_restfreqs(self, ids=None):
1745 """\
1746 Get the restfrequency(s) stored in this scantable.
1747 The return value(s) are always of unit 'Hz'
1748
1749 Parameters:
1750
1751 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1752 be retrieved
1753
1754 Returns:
1755
1756 dictionary containing ids and a list of doubles for each id
1757
1758 """
1759 if ids is None:
1760 rfreqs = {}
1761 idlist = self.getmolnos()
1762 for i in idlist:
1763 rfreqs[i] = list(self._getrestfreqs(i))
1764 return rfreqs
1765 else:
1766 if type(ids) == list or type(ids) == tuple:
1767 rfreqs = {}
1768 for i in ids:
1769 rfreqs[i] = list(self._getrestfreqs(i))
1770 return rfreqs
1771 else:
1772 return list(self._getrestfreqs(ids))
1773
1774 @asaplog_post_dec
1775 def set_restfreqs(self, freqs=None, unit='Hz'):
1776 """\
1777 Set or replace the restfrequency specified and
1778 if the 'freqs' argument holds a scalar,
1779 then that rest frequency will be applied to all the selected
1780 data. If the 'freqs' argument holds
1781 a vector, then it MUST be of equal or smaller length than
1782 the number of IFs (and the available restfrequencies will be
1783 replaced by this vector). In this case, *all* data have
1784 the restfrequency set per IF according
1785 to the corresponding value you give in the 'freqs' vector.
1786 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1787 IF 1 gets restfreq 2e9.
1788
1789 You can also specify the frequencies via a linecatalog.
1790
1791 Parameters:
1792
1793 freqs: list of rest frequency values or string idenitfiers
1794
1795 unit: unit for rest frequency (default 'Hz')
1796
1797
1798 Example::
1799
1800 # set the given restfrequency for the all currently selected IFs
1801 scan.set_restfreqs(freqs=1.4e9)
1802 # set restfrequencies for the n IFs (n > 1) in the order of the
1803 # list, i.e
1804 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1805 # len(list_of_restfreqs) == nIF
1806 # for nIF == 1 the following will set multiple restfrequency for
1807 # that IF
1808 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1809 # set multiple restfrequencies per IF. as a list of lists where
1810 # the outer list has nIF elements, the inner s arbitrary
1811 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1812
1813 *Note*:
1814
1815 To do more sophisticate Restfrequency setting, e.g. on a
1816 source and IF basis, use scantable.set_selection() before using
1817 this function::
1818
1819 # provided your scantable is called scan
1820 selection = selector()
1821 selection.set_name('ORION*')
1822 selection.set_ifs([1])
1823 scan.set_selection(selection)
1824 scan.set_restfreqs(freqs=86.6e9)
1825
1826 """
1827 varlist = vars()
1828 from asap import linecatalog
1829 # simple value
1830 if isinstance(freqs, int) or isinstance(freqs, float):
1831 self._setrestfreqs([freqs], [""], unit)
1832 # list of values
1833 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1834 # list values are scalars
1835 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1836 if len(freqs) == 1:
1837 self._setrestfreqs(freqs, [""], unit)
1838 else:
1839 # allow the 'old' mode of setting mulitple IFs
1840 savesel = self._getselection()
1841 sel = self.get_selection()
1842 iflist = self.getifnos()
1843 if len(freqs)>len(iflist):
1844 raise ValueError("number of elements in list of list "
1845 "exeeds the current IF selections")
1846 iflist = self.getifnos()
1847 for i, fval in enumerate(freqs):
1848 sel.set_ifs(iflist[i])
1849 self._setselection(sel)
1850 self._setrestfreqs([fval], [""], unit)
1851 self._setselection(savesel)
1852
1853 # list values are dict, {'value'=, 'name'=)
1854 elif isinstance(freqs[-1], dict):
1855 values = []
1856 names = []
1857 for d in freqs:
1858 values.append(d["value"])
1859 names.append(d["name"])
1860 self._setrestfreqs(values, names, unit)
1861 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1862 savesel = self._getselection()
1863 sel = self.get_selection()
1864 iflist = self.getifnos()
1865 if len(freqs)>len(iflist):
1866 raise ValueError("number of elements in list of list exeeds"
1867 " the current IF selections")
1868 for i, fval in enumerate(freqs):
1869 sel.set_ifs(iflist[i])
1870 self._setselection(sel)
1871 self._setrestfreqs(fval, [""], unit)
1872 self._setselection(savesel)
1873 # freqs are to be taken from a linecatalog
1874 elif isinstance(freqs, linecatalog):
1875 savesel = self._getselection()
1876 sel = self.get_selection()
1877 for i in xrange(freqs.nrow()):
1878 sel.set_ifs(iflist[i])
1879 self._setselection(sel)
1880 self._setrestfreqs([freqs.get_frequency(i)],
1881 [freqs.get_name(i)], "MHz")
1882 # ensure that we are not iterating past nIF
1883 if i == self.nif()-1: break
1884 self._setselection(savesel)
1885 else:
1886 return
1887 self._add_history("set_restfreqs", varlist)
1888
1889 @asaplog_post_dec
1890 def shift_refpix(self, delta):
1891 """\
1892 Shift the reference pixel of the Spectra Coordinate by an
1893 integer amount.
1894
1895 Parameters:
1896
1897 delta: the amount to shift by
1898
1899 *Note*:
1900
1901 Be careful using this with broadband data.
1902
1903 """
1904 varlist = vars()
1905 Scantable.shift_refpix(self, delta)
1906 s._add_history("shift_refpix", varlist)
1907
1908 @asaplog_post_dec
1909 def history(self, filename=None):
1910 """\
1911 Print the history. Optionally to a file.
1912
1913 Parameters:
1914
1915 filename: The name of the file to save the history to.
1916
1917 """
1918 hist = list(self._gethistory())
1919 out = "-"*80
1920 for h in hist:
1921 if h.startswith("---"):
1922 out = "\n".join([out, h])
1923 else:
1924 items = h.split("##")
1925 date = items[0]
1926 func = items[1]
1927 items = items[2:]
1928 out += "\n"+date+"\n"
1929 out += "Function: %s\n Parameters:" % (func)
1930 for i in items:
1931 if i == '':
1932 continue
1933 s = i.split("=")
1934 out += "\n %s = %s" % (s[0], s[1])
1935 out = "\n".join([out, "-"*80])
1936 if filename is not None:
1937 if filename is "":
1938 filename = 'scantable_history.txt'
1939 import os
1940 filename = os.path.expandvars(os.path.expanduser(filename))
1941 if not os.path.isdir(filename):
1942 data = open(filename, 'w')
1943 data.write(out)
1944 data.close()
1945 else:
1946 msg = "Illegal file name '%s'." % (filename)
1947 raise IOError(msg)
1948 return page(out)
1949
1950 #
1951 # Maths business
1952 #
1953 @asaplog_post_dec
1954 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1955 """\
1956 Return the (time) weighted average of a scan. Scans will be averaged
1957 only if the source direction (RA/DEC) is within 1' otherwise
1958
1959 *Note*:
1960
1961 in channels only - align if necessary
1962
1963 Parameters:
1964
1965 mask: an optional mask (only used for 'var' and 'tsys'
1966 weighting)
1967
1968 scanav: True averages each scan separately
1969 False (default) averages all scans together,
1970
1971 weight: Weighting scheme.
1972 'none' (mean no weight)
1973 'var' (1/var(spec) weighted)
1974 'tsys' (1/Tsys**2 weighted)
1975 'tint' (integration time weighted)
1976 'tintsys' (Tint/Tsys**2)
1977 'median' ( median averaging)
1978 The default is 'tint'
1979
1980 align: align the spectra in velocity before averaging. It takes
1981 the time of the first spectrum as reference time.
1982
1983 Example::
1984
1985 # time average the scantable without using a mask
1986 newscan = scan.average_time()
1987
1988 """
1989 varlist = vars()
1990 weight = weight or 'TINT'
1991 mask = mask or ()
1992 scanav = (scanav and 'SCAN') or 'NONE'
1993 scan = (self, )
1994
1995 if align:
1996 scan = (self.freq_align(insitu=False), )
1997 s = None
1998 if weight.upper() == 'MEDIAN':
1999 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2000 scanav))
2001 else:
2002 s = scantable(self._math._average(scan, mask, weight.upper(),
2003 scanav))
2004 s._add_history("average_time", varlist)
2005 return s
2006
2007 @asaplog_post_dec
2008 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2009 """\
2010 Return a scan where all spectra are converted to either
2011 Jansky or Kelvin depending upon the flux units of the scan table.
2012 By default the function tries to look the values up internally.
2013 If it can't find them (or if you want to over-ride), you must
2014 specify EITHER jyperk OR eta (and D which it will try to look up
2015 also if you don't set it). jyperk takes precedence if you set both.
2016
2017 Parameters:
2018
2019 jyperk: the Jy / K conversion factor
2020
2021 eta: the aperture efficiency
2022
2023 d: the geometric diameter (metres)
2024
2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028
2029 """
2030 if insitu is None: insitu = rcParams['insitu']
2031 self._math._setinsitu(insitu)
2032 varlist = vars()
2033 jyperk = jyperk or -1.0
2034 d = d or -1.0
2035 eta = eta or -1.0
2036 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2037 s._add_history("convert_flux", varlist)
2038 if insitu: self._assign(s)
2039 else: return s
2040
2041 @asaplog_post_dec
2042 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2043 """\
2044 Return a scan after applying a gain-elevation correction.
2045 The correction can be made via either a polynomial or a
2046 table-based interpolation (and extrapolation if necessary).
2047 You specify polynomial coefficients, an ascii table or neither.
2048 If you specify neither, then a polynomial correction will be made
2049 with built in coefficients known for certain telescopes (an error
2050 will occur if the instrument is not known).
2051 The data and Tsys are *divided* by the scaling factors.
2052
2053 Parameters:
2054
2055 poly: Polynomial coefficients (default None) to compute a
2056 gain-elevation correction as a function of
2057 elevation (in degrees).
2058
2059 filename: The name of an ascii file holding correction factors.
2060 The first row of the ascii file must give the column
2061 names and these MUST include columns
2062 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2063 by this) somewhere.
2064 The second row must give the data type of the
2065 column. Use 'R' for Real and 'I' for Integer.
2066 An example file would be
2067 (actual factors are arbitrary) :
2068
2069 TIME ELEVATION FACTOR
2070 R R R
2071 0.1 0 0.8
2072 0.2 20 0.85
2073 0.3 40 0.9
2074 0.4 60 0.85
2075 0.5 80 0.8
2076 0.6 90 0.75
2077
2078 method: Interpolation method when correcting from a table.
2079 Values are 'nearest', 'linear' (default), 'cubic'
2080 and 'spline'
2081
2082 insitu: if False a new scantable is returned.
2083 Otherwise, the scaling is done in-situ
2084 The default is taken from .asaprc (False)
2085
2086 """
2087
2088 if insitu is None: insitu = rcParams['insitu']
2089 self._math._setinsitu(insitu)
2090 varlist = vars()
2091 poly = poly or ()
2092 from os.path import expandvars
2093 filename = expandvars(filename)
2094 s = scantable(self._math._gainel(self, poly, filename, method))
2095 s._add_history("gain_el", varlist)
2096 if insitu:
2097 self._assign(s)
2098 else:
2099 return s
2100
2101 @asaplog_post_dec
2102 def freq_align(self, reftime=None, method='cubic', insitu=None):
2103 """\
2104 Return a scan where all rows have been aligned in frequency/velocity.
2105 The alignment frequency frame (e.g. LSRK) is that set by function
2106 set_freqframe.
2107
2108 Parameters:
2109
2110 reftime: reference time to align at. By default, the time of
2111 the first row of data is used.
2112
2113 method: Interpolation method for regridding the spectra.
2114 Choose from 'nearest', 'linear', 'cubic' (default)
2115 and 'spline'
2116
2117 insitu: if False a new scantable is returned.
2118 Otherwise, the scaling is done in-situ
2119 The default is taken from .asaprc (False)
2120
2121 """
2122 if insitu is None: insitu = rcParams["insitu"]
2123 oldInsitu = self._math._insitu()
2124 self._math._setinsitu(insitu)
2125 varlist = vars()
2126 reftime = reftime or ""
2127 s = scantable(self._math._freq_align(self, reftime, method))
2128 s._add_history("freq_align", varlist)
2129 self._math._setinsitu(oldInsitu)
2130 if insitu:
2131 self._assign(s)
2132 else:
2133 return s
2134
2135 @asaplog_post_dec
2136 def opacity(self, tau=None, insitu=None):
2137 """\
2138 Apply an opacity correction. The data
2139 and Tsys are multiplied by the correction factor.
2140
2141 Parameters:
2142
2143 tau: (list of) opacity from which the correction factor is
2144 exp(tau*ZD)
2145 where ZD is the zenith-distance.
2146 If a list is provided, it has to be of length nIF,
2147 nIF*nPol or 1 and in order of IF/POL, e.g.
2148 [opif0pol0, opif0pol1, opif1pol0 ...]
2149 if tau is `None` the opacities are determined from a
2150 model.
2151
2152 insitu: if False a new scantable is returned.
2153 Otherwise, the scaling is done in-situ
2154 The default is taken from .asaprc (False)
2155
2156 """
2157 if insitu is None:
2158 insitu = rcParams['insitu']
2159 self._math._setinsitu(insitu)
2160 varlist = vars()
2161 if not hasattr(tau, "__len__"):
2162 tau = [tau]
2163 s = scantable(self._math._opacity(self, tau))
2164 s._add_history("opacity", varlist)
2165 if insitu:
2166 self._assign(s)
2167 else:
2168 return s
2169
2170 @asaplog_post_dec
2171 def bin(self, width=5, insitu=None):
2172 """\
2173 Return a scan where all spectra have been binned up.
2174
2175 Parameters:
2176
2177 width: The bin width (default=5) in pixels
2178
2179 insitu: if False a new scantable is returned.
2180 Otherwise, the scaling is done in-situ
2181 The default is taken from .asaprc (False)
2182
2183 """
2184 if insitu is None:
2185 insitu = rcParams['insitu']
2186 self._math._setinsitu(insitu)
2187 varlist = vars()
2188 s = scantable(self._math._bin(self, width))
2189 s._add_history("bin", varlist)
2190 if insitu:
2191 self._assign(s)
2192 else:
2193 return s
2194
2195 @asaplog_post_dec
2196 def reshape(self, first, last, insitu=None):
2197 """Resize the band by providing first and last channel.
2198 This will cut off all channels outside [first, last].
2199 """
2200 if insitu is None:
2201 insitu = rcParams['insitu']
2202 varlist = vars()
2203 if last < 0:
2204 last = self.nchan()-1 + last
2205 s = None
2206 if insitu:
2207 s = self
2208 else:
2209 s = self.copy()
2210 s._reshape(first,last)
2211 s._add_history("reshape", varlist)
2212 if not insitu:
2213 return s
2214
2215 @asaplog_post_dec
2216 def resample(self, width=5, method='cubic', insitu=None):
2217 """\
2218 Return a scan where all spectra have been binned up.
2219
2220 Parameters:
2221
2222 width: The bin width (default=5) in pixels
2223
2224 method: Interpolation method when correcting from a table.
2225 Values are 'nearest', 'linear', 'cubic' (default)
2226 and 'spline'
2227
2228 insitu: if False a new scantable is returned.
2229 Otherwise, the scaling is done in-situ
2230 The default is taken from .asaprc (False)
2231
2232 """
2233 if insitu is None:
2234 insitu = rcParams['insitu']
2235 self._math._setinsitu(insitu)
2236 varlist = vars()
2237 s = scantable(self._math._resample(self, method, width))
2238 s._add_history("resample", varlist)
2239 if insitu:
2240 self._assign(s)
2241 else:
2242 return s
2243
2244 @asaplog_post_dec
2245 def average_pol(self, mask=None, weight='none'):
2246 """\
2247 Average the Polarisations together.
2248
2249 Parameters:
2250
2251 mask: An optional mask defining the region, where the
2252 averaging will be applied. The output will have all
2253 specified points masked.
2254
2255 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2256 weighted), or 'tsys' (1/Tsys**2 weighted)
2257
2258 """
2259 varlist = vars()
2260 mask = mask or ()
2261 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2262 s._add_history("average_pol", varlist)
2263 return s
2264
2265 @asaplog_post_dec
2266 def average_beam(self, mask=None, weight='none'):
2267 """\
2268 Average the Beams together.
2269
2270 Parameters:
2271 mask: An optional mask defining the region, where the
2272 averaging will be applied. The output will have all
2273 specified points masked.
2274
2275 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2276 weighted), or 'tsys' (1/Tsys**2 weighted)
2277
2278 """
2279 varlist = vars()
2280 mask = mask or ()
2281 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2282 s._add_history("average_beam", varlist)
2283 return s
2284
2285 def parallactify(self, pflag):
2286 """\
2287 Set a flag to indicate whether this data should be treated as having
2288 been 'parallactified' (total phase == 0.0)
2289
2290 Parameters:
2291
2292 pflag: Bool indicating whether to turn this on (True) or
2293 off (False)
2294
2295 """
2296 varlist = vars()
2297 self._parallactify(pflag)
2298 self._add_history("parallactify", varlist)
2299
2300 @asaplog_post_dec
2301 def convert_pol(self, poltype=None):
2302 """\
2303 Convert the data to a different polarisation type.
2304 Note that you will need cross-polarisation terms for most conversions.
2305
2306 Parameters:
2307
2308 poltype: The new polarisation type. Valid types are:
2309 'linear', 'circular', 'stokes' and 'linpol'
2310
2311 """
2312 varlist = vars()
2313 s = scantable(self._math._convertpol(self, poltype))
2314 s._add_history("convert_pol", varlist)
2315 return s
2316
2317 @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # 'action' drives the interactive verification loop:
        # anything other than "N" means "keep prompting/plotting";
        # "N" (set on user response 'R') rejects all remaining rows.
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                # smoothed (xsm/ysm) vs original (xorg/yorg) data per row
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row the default action passed to the
                # prompt is cleared -- presumably so _get_verify_action
                # stops pre-selecting the initial 'H'; TODO confirm
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2409
2410 @asaplog_post_dec
2411 def regrid_channel(self, width=5, plot=False, insitu=None):
2412 """\
2413 Regrid the spectra by the specified channel width
2414
2415 Parameters:
2416
2417 width: The channel width (float) of regridded spectra
2418 in the current spectral unit.
2419
2420 plot: [NOT IMPLEMENTED YET]
2421 plot the original and the regridded spectra.
2422 In this each indivual fit has to be approved, by
2423 typing 'y' or 'n'
2424
2425 insitu: if False a new scantable is returned.
2426 Otherwise, the scaling is done in-situ
2427 The default is taken from .asaprc (False)
2428
2429 """
2430 if insitu is None: insitu = rcParams['insitu']
2431 varlist = vars()
2432
2433 if plot:
2434 asaplog.post()
2435 asaplog.push("Verification plot is not implemtnetd yet.")
2436 asaplog.post("WARN")
2437
2438 s = self.copy()
2439 s._regrid_specchan(width)
2440
2441 s._add_history("regrid_channel", varlist)
2442
2443# if plot:
2444# from asap.asapplotter import new_asaplot
2445# theplot = new_asaplot(rcParams['plotter.gui'])
2446# from matplotlib import rc as rcp
2447# rcp('lines', linewidth=1)
2448# theplot.set_panels()
2449# ylab=s._get_ordinate_label()
2450# #theplot.palette(0,["#777777","red"])
2451# for r in xrange(s.nrow()):
2452# xsm=s._getabcissa(r)
2453# ysm=s._getspectrum(r)
2454# xorg=orgscan._getabcissa(r)
2455# yorg=orgscan._getspectrum(r)
2456# theplot.clear()
2457# theplot.hold()
2458# theplot.set_axes('ylabel',ylab)
2459# theplot.set_axes('xlabel',s._getabcissalabel(r))
2460# theplot.set_axes('title',s._getsourcename(r))
2461# theplot.set_line(label='Original',color="#777777")
2462# theplot.plot(xorg,yorg)
2463# theplot.set_line(label='Smoothed',color="red")
2464# theplot.plot(xsm,ysm)
2465# ### Ugly part for legend
2466# for i in [0,1]:
2467# theplot.subplots[0]['lines'].append(
2468# [theplot.subplots[0]['axes'].lines[i]]
2469# )
2470# theplot.release()
2471# ### Ugly part for legend
2472# theplot.subplots[0]['lines']=[]
2473# res = raw_input("Accept smoothing ([y]/n): ")
2474# if res.upper() == 'N':
2475# s._setspectrum(yorg, r)
2476# theplot.quit()
2477# del theplot
2478# del orgscan
2479
2480 if insitu: self._assign(s)
2481 else: return s
2482
2483 @asaplog_post_dec
2484 def _parse_wn(self, wn):
2485 if isinstance(wn, list) or isinstance(wn, tuple):
2486 return wn
2487 elif isinstance(wn, int):
2488 return [ wn ]
2489 elif isinstance(wn, str):
2490 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2491 val = wn.split('-')
2492 val = [int(val[0]), int(val[1])]
2493 val.sort()
2494 res = [i for i in xrange(val[0], val[1]+1)]
2495 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2496 val = int(wn[2:])+1
2497 res = [i for i in xrange(val)]
2498 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2499 val = int(wn[:-2])+1
2500 res = [i for i in xrange(val)]
2501 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2502 val = int(wn[1:])
2503 res = [i for i in xrange(val)]
2504 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2505 val = int(wn[:-1])
2506 res = [i for i in xrange(val)]
2507 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2508 # then interpreted in C++
2509 # side as [a,a+1,...,a_nyq]
2510 # (CAS-3759)
2511 val = int(wn[2:])
2512 res = [val, -999]
2513 #res = [i for i in xrange(val, self.nchan()/2+1)]
2514 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2515 # then interpreted in C++
2516 # side as [a,a+1,...,a_nyq]
2517 # (CAS-3759)
2518 val = int(wn[:-2])
2519 res = [val, -999]
2520 #res = [i for i in xrange(val, self.nchan()/2+1)]
2521 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2522 # then interpreted in C++
2523 # side as [a+1,a+2,...,a_nyq]
2524 # (CAS-3759)
2525 val = int(wn[1:])+1
2526 res = [val, -999]
2527 #res = [i for i in xrange(val, self.nchan()/2+1)]
2528 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2529 # then interpreted in C++
2530 # side as [a+1,a+2,...,a_nyq]
2531 # (CAS-3759)
2532 val = int(wn[:-1])+1
2533 res = [val, -999]
2534 #res = [i for i in xrange(val, self.nchan()/2+1)]
2535
2536 return res
2537 else:
2538 msg = 'wrong value given for addwn/rejwn'
2539 raise RuntimeError(msg)
2540
2541 @asaplog_post_dec
2542 def calc_aic(self, value=None, blfunc=None, order=None, mask=None,
2543 whichrow=None, uselinefinder=None, edge=None,
2544 threshold=None, chan_avg_limit=None):
2545 """\
2546 Calculates and returns model selection criteria for a specified
2547 baseline model and a given spectrum data.
2548 Available values include Akaike Information Criterion (AIC), the
2549 corrected Akaike Information Criterion (AICc) by Sugiura(1978),
2550 Bayesian Information Criterion (BIC) and the Generalised Cross
2551 Validation (GCV).
2552
2553 Parameters:
2554 value: name of model selection criteria to calculate.
2555 available ones include 'aic', 'aicc', 'bic' and
2556 'gcv'. default is 'aicc'.
2557 blfunc: baseline function name. available ones include
2558 'chebyshev', 'cspline' and 'sinusoid'.
2559 default is 'chebyshev'.
2560 order: parameter for basline function. actually stands for
2561 order of polynomial (order) for 'chebyshev',
2562 number of spline pieces (npiece) for 'cspline' and
2563 maximum wave number for 'sinusoid', respectively.
2564 default is 5 (which is also the default order value
2565 for [auto_]chebyshev_baseline()).
2566 mask: an optional mask. default is [].
2567 whichrow: row number. default is 0 (the first row)
2568 uselinefinder: use sd.linefinder() to flag out line regions
2569 default is True.
2570 edge: an optional number of channel to drop at
2571 the edge of spectrum. If only one value is
2572 specified, the same number will be dropped
2573 from both sides of the spectrum. Default
2574 is to keep all channels. Nested tuples
2575 represent individual edge selection for
2576 different IFs (a number of spectral channels
2577 can be different)
2578 default is (0, 0).
2579 threshold: the threshold used by line finder. It is
2580 better to keep it large as only strong lines
2581 affect the baseline solution.
2582 default is 3.
2583 chan_avg_limit: a maximum number of consequtive spectral
2584 channels to average during the search of
2585 weak and broad lines. The default is no
2586 averaging (and no search for weak lines).
2587 If such lines can affect the fitted baseline
2588 (e.g. a high order polynomial is fitted),
2589 increase this parameter (usually values up
2590 to 8 are reasonable). Most users of this
2591 method should find the default value sufficient.
2592 default is 1.
2593
2594 Example:
2595 aic = scan.calc_aic(blfunc='chebyshev', order=5, whichrow=0)
2596 """
2597
2598 try:
2599 varlist = vars()
2600
2601 if value is None: value = 'aicc'
2602 if blfunc is None: blfunc = 'chebyshev'
2603 if order is None: order = 5
2604 if mask is None: mask = []
2605 if whichrow is None: whichrow = 0
2606 if uselinefinder is None: uselinefinder = True
2607 if edge is None: edge = (0, 0)
2608 if threshold is None: threshold = 3
2609 if chan_avg_limit is None: chan_avg_limit = 1
2610
2611 return self._calc_aic(value, blfunc, order, mask,
2612 whichrow, uselinefinder, edge,
2613 threshold, chan_avg_limit)
2614
2615 except RuntimeError, e:
2616 raise_fitting_failure_exception(e)
2617
2618 @asaplog_post_dec
2619 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2620 fftmethod=None, fftthresh=None,
2621 addwn=None, rejwn=None, clipthresh=None,
2622 clipniter=None, plot=None,
2623 getresidual=None, showprogress=None,
2624 minnrow=None, outlog=None, blfile=None, csvformat=None):
2625 """\
2626 Return a scan which has been baselined (all rows) with sinusoidal
2627 functions.
2628
2629 Parameters:
2630 insitu: if False a new scantable is returned.
2631 Otherwise, the scaling is done in-situ
2632 The default is taken from .asaprc (False)
2633 mask: an optional mask
2634 applyfft: if True use some method, such as FFT, to find
2635 strongest sinusoidal components in the wavenumber
2636 domain to be used for baseline fitting.
2637 default is True.
2638 fftmethod: method to find the strong sinusoidal components.
2639 now only 'fft' is available and it is the default.
2640 fftthresh: the threshold to select wave numbers to be used for
2641 fitting from the distribution of amplitudes in the
2642 wavenumber domain.
2643 both float and string values accepted.
2644 given a float value, the unit is set to sigma.
2645 for string values, allowed formats include:
2646 'xsigma' or 'x' (= x-sigma level. e.g.,
2647 '3sigma'), or
2648 'topx' (= the x strongest ones, e.g. 'top5').
2649 default is 3.0 (unit: sigma).
2650 addwn: the additional wave numbers to be used for fitting.
2651 list or integer value is accepted to specify every
2652 wave numbers. also string value can be used in case
2653 you need to specify wave numbers in a certain range,
2654 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2655 '<a' (= 0,1,...,a-2,a-1),
2656 '>=a' (= a, a+1, ... up to the maximum wave
2657 number corresponding to the Nyquist
2658 frequency for the case of FFT).
2659 default is [0].
2660 rejwn: the wave numbers NOT to be used for fitting.
2661 can be set just as addwn but has higher priority:
2662 wave numbers which are specified both in addwn
2663 and rejwn will NOT be used. default is [].
2664 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2665 clipniter: maximum number of iteration of 'clipthresh'-sigma
2666 clipping (default is 0)
2667 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2668 plot the fit and the residual. In this each
2669 indivual fit has to be approved, by typing 'y'
2670 or 'n'
2671 getresidual: if False, returns best-fit values instead of
2672 residual. (default is True)
2673 showprogress: show progress status for large data.
2674 default is True.
2675 minnrow: minimum number of input spectra to show.
2676 default is 1000.
2677 outlog: Output the coefficients of the best-fit
2678 function to logger (default is False)
2679 blfile: Name of a text file in which the best-fit
2680 parameter values to be written
2681 (default is '': no file/logger output)
2682 csvformat: if True blfile is csv-formatted, default is False.
2683
2684 Example:
2685 # return a scan baselined by a combination of sinusoidal curves
2686 # having wave numbers in spectral window up to 10,
2687 # also with 3-sigma clipping, iteration up to 4 times
2688 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2689
2690 Note:
2691 The best-fit parameter values output in logger and/or blfile are now
2692 based on specunit of 'channel'.
2693 """
2694
2695 try:
2696 varlist = vars()
2697
2698 if insitu is None: insitu = rcParams['insitu']
2699 if insitu:
2700 workscan = self
2701 else:
2702 workscan = self.copy()
2703
2704 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2705 if mask is None: mask = []
2706 if applyfft is None: applyfft = True
2707 if fftmethod is None: fftmethod = 'fft'
2708 if fftthresh is None: fftthresh = 3.0
2709 if addwn is None: addwn = [0]
2710 if rejwn is None: rejwn = []
2711 if clipthresh is None: clipthresh = 3.0
2712 if clipniter is None: clipniter = 0
2713 if plot is None: plot = False
2714 if getresidual is None: getresidual = True
2715 if showprogress is None: showprogress = True
2716 if minnrow is None: minnrow = 1000
2717 if outlog is None: outlog = False
2718 if blfile is None: blfile = ''
2719 if csvformat is None: csvformat = False
2720
2721 if csvformat:
2722 scsvformat = "T"
2723 else:
2724 scsvformat = "F"
2725
2726 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2727 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2728 str(fftthresh).lower(),
2729 workscan._parse_wn(addwn),
2730 workscan._parse_wn(rejwn),
2731 clipthresh, clipniter,
2732 getresidual,
2733 pack_progress_params(showprogress,
2734 minnrow),
2735 outlog, scsvformat+blfile)
2736 workscan._add_history('sinusoid_baseline', varlist)
2737
2738 if insitu:
2739 self._assign(workscan)
2740 else:
2741 return workscan
2742
2743 except RuntimeError, e:
2744 raise_fitting_failure_exception(e)
2745
2746
2747 @asaplog_post_dec
2748 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2749 fftmethod=None, fftthresh=None,
2750 addwn=None, rejwn=None, clipthresh=None,
2751 clipniter=None, edge=None, threshold=None,
2752 chan_avg_limit=None, plot=None,
2753 getresidual=None, showprogress=None,
2754 minnrow=None, outlog=None, blfile=None, csvformat=None):
2755 """\
2756 Return a scan which has been baselined (all rows) with sinusoidal
2757 functions.
2758 Spectral lines are detected first using linefinder and masked out
2759 to avoid them affecting the baseline solution.
2760
2761 Parameters:
2762 insitu: if False a new scantable is returned.
2763 Otherwise, the scaling is done in-situ
2764 The default is taken from .asaprc (False)
2765 mask: an optional mask retreived from scantable
2766 applyfft: if True use some method, such as FFT, to find
2767 strongest sinusoidal components in the wavenumber
2768 domain to be used for baseline fitting.
2769 default is True.
2770 fftmethod: method to find the strong sinusoidal components.
2771 now only 'fft' is available and it is the default.
2772 fftthresh: the threshold to select wave numbers to be used for
2773 fitting from the distribution of amplitudes in the
2774 wavenumber domain.
2775 both float and string values accepted.
2776 given a float value, the unit is set to sigma.
2777 for string values, allowed formats include:
2778 'xsigma' or 'x' (= x-sigma level. e.g.,
2779 '3sigma'), or
2780 'topx' (= the x strongest ones, e.g. 'top5').
2781 default is 3.0 (unit: sigma).
2782 addwn: the additional wave numbers to be used for fitting.
2783 list or integer value is accepted to specify every
2784 wave numbers. also string value can be used in case
2785 you need to specify wave numbers in a certain range,
2786 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2787 '<a' (= 0,1,...,a-2,a-1),
2788 '>=a' (= a, a+1, ... up to the maximum wave
2789 number corresponding to the Nyquist
2790 frequency for the case of FFT).
2791 default is [0].
2792 rejwn: the wave numbers NOT to be used for fitting.
2793 can be set just as addwn but has higher priority:
2794 wave numbers which are specified both in addwn
2795 and rejwn will NOT be used. default is [].
2796 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2797 clipniter: maximum number of iteration of 'clipthresh'-sigma
2798 clipping (default is 0)
2799 edge: an optional number of channel to drop at
2800 the edge of spectrum. If only one value is
2801 specified, the same number will be dropped
2802 from both sides of the spectrum. Default
2803 is to keep all channels. Nested tuples
2804 represent individual edge selection for
2805 different IFs (a number of spectral channels
2806 can be different)
2807 threshold: the threshold used by line finder. It is
2808 better to keep it large as only strong lines
2809 affect the baseline solution.
2810 chan_avg_limit: a maximum number of consequtive spectral
2811 channels to average during the search of
2812 weak and broad lines. The default is no
2813 averaging (and no search for weak lines).
2814 If such lines can affect the fitted baseline
2815 (e.g. a high order polynomial is fitted),
2816 increase this parameter (usually values up
2817 to 8 are reasonable). Most users of this
2818 method should find the default value sufficient.
2819 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2820 plot the fit and the residual. In this each
2821 indivual fit has to be approved, by typing 'y'
2822 or 'n'
2823 getresidual: if False, returns best-fit values instead of
2824 residual. (default is True)
2825 showprogress: show progress status for large data.
2826 default is True.
2827 minnrow: minimum number of input spectra to show.
2828 default is 1000.
2829 outlog: Output the coefficients of the best-fit
2830 function to logger (default is False)
2831 blfile: Name of a text file in which the best-fit
2832 parameter values to be written
2833 (default is "": no file/logger output)
2834 csvformat: if True blfile is csv-formatted, default is False.
2835
2836 Example:
2837 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2838
2839 Note:
2840 The best-fit parameter values output in logger and/or blfile are now
2841 based on specunit of 'channel'.
2842 """
2843
2844 try:
2845 varlist = vars()
2846
2847 if insitu is None: insitu = rcParams['insitu']
2848 if insitu:
2849 workscan = self
2850 else:
2851 workscan = self.copy()
2852
2853 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2854 if mask is None: mask = []
2855 if applyfft is None: applyfft = True
2856 if fftmethod is None: fftmethod = 'fft'
2857 if fftthresh is None: fftthresh = 3.0
2858 if addwn is None: addwn = [0]
2859 if rejwn is None: rejwn = []
2860 if clipthresh is None: clipthresh = 3.0
2861 if clipniter is None: clipniter = 0
2862 if edge is None: edge = (0,0)
2863 if threshold is None: threshold = 3
2864 if chan_avg_limit is None: chan_avg_limit = 1
2865 if plot is None: plot = False
2866 if getresidual is None: getresidual = True
2867 if showprogress is None: showprogress = True
2868 if minnrow is None: minnrow = 1000
2869 if outlog is None: outlog = False
2870 if blfile is None: blfile = ''
2871 if csvformat is None: csvformat = False
2872
2873 if csvformat:
2874 scsvformat = "T"
2875 else:
2876 scsvformat = "F"
2877
2878 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2879 workscan._auto_sinusoid_baseline(mask, applyfft,
2880 fftmethod.lower(),
2881 str(fftthresh).lower(),
2882 workscan._parse_wn(addwn),
2883 workscan._parse_wn(rejwn),
2884 clipthresh, clipniter,
2885 normalise_edge_param(edge),
2886 threshold, chan_avg_limit,
2887 getresidual,
2888 pack_progress_params(showprogress,
2889 minnrow),
2890 outlog, scsvformat+blfile)
2891 workscan._add_history("auto_sinusoid_baseline", varlist)
2892
2893 if insitu:
2894 self._assign(workscan)
2895 else:
2896 return workscan
2897
2898 except RuntimeError, e:
2899 raise_fitting_failure_exception(e)
2900
2901 @asaplog_post_dec
2902 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2903 clipthresh=None, clipniter=None, plot=None,
2904 getresidual=None, showprogress=None, minnrow=None,
2905 outlog=None, blfile=None, csvformat=None):
2906 """\
2907 Return a scan which has been baselined (all rows) by cubic spline
2908 function (piecewise cubic polynomial).
2909
2910 Parameters:
2911 insitu: If False a new scantable is returned.
2912 Otherwise, the scaling is done in-situ
2913 The default is taken from .asaprc (False)
2914 mask: An optional mask
2915 npiece: Number of pieces. (default is 2)
2916 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2917 clipniter: maximum number of iteration of 'clipthresh'-sigma
2918 clipping (default is 0)
2919 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2920 plot the fit and the residual. In this each
2921 indivual fit has to be approved, by typing 'y'
2922 or 'n'
2923 getresidual: if False, returns best-fit values instead of
2924 residual. (default is True)
2925 showprogress: show progress status for large data.
2926 default is True.
2927 minnrow: minimum number of input spectra to show.
2928 default is 1000.
2929 outlog: Output the coefficients of the best-fit
2930 function to logger (default is False)
2931 blfile: Name of a text file in which the best-fit
2932 parameter values to be written
2933 (default is "": no file/logger output)
2934 csvformat: if True blfile is csv-formatted, default is False.
2935
2936 Example:
2937 # return a scan baselined by a cubic spline consisting of 2 pieces
2938 # (i.e., 1 internal knot),
2939 # also with 3-sigma clipping, iteration up to 4 times
2940 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2941
2942 Note:
2943 The best-fit parameter values output in logger and/or blfile are now
2944 based on specunit of 'channel'.
2945 """
2946
2947 try:
2948 varlist = vars()
2949
2950 if insitu is None: insitu = rcParams['insitu']
2951 if insitu:
2952 workscan = self
2953 else:
2954 workscan = self.copy()
2955
2956 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2957 if mask is None: mask = []
2958 if npiece is None: npiece = 2
2959 if clipthresh is None: clipthresh = 3.0
2960 if clipniter is None: clipniter = 0
2961 if plot is None: plot = False
2962 if getresidual is None: getresidual = True
2963 if showprogress is None: showprogress = True
2964 if minnrow is None: minnrow = 1000
2965 if outlog is None: outlog = False
2966 if blfile is None: blfile = ''
2967 if csvformat is None: csvformat = False
2968
2969 if csvformat:
2970 scsvformat = "T"
2971 else:
2972 scsvformat = "F"
2973
2974 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2975 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2976 getresidual,
2977 pack_progress_params(showprogress,
2978 minnrow),
2979 outlog, scsvformat+blfile)
2980 workscan._add_history("cspline_baseline", varlist)
2981
2982 if insitu:
2983 self._assign(workscan)
2984 else:
2985 return workscan
2986
2987 except RuntimeError, e:
2988 raise_fitting_failure_exception(e)
2989
2990 @asaplog_post_dec
2991 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2992 clipthresh=None, clipniter=None,
2993 edge=None, threshold=None, chan_avg_limit=None,
2994 getresidual=None, plot=None,
2995 showprogress=None, minnrow=None, outlog=None,
2996 blfile=None, csvformat=None):
2997 """\
2998 Return a scan which has been baselined (all rows) by cubic spline
2999 function (piecewise cubic polynomial).
3000 Spectral lines are detected first using linefinder and masked out
3001 to avoid them affecting the baseline solution.
3002
3003 Parameters:
3004 insitu: if False a new scantable is returned.
3005 Otherwise, the scaling is done in-situ
3006 The default is taken from .asaprc (False)
3007 mask: an optional mask retreived from scantable
3008 npiece: Number of pieces. (default is 2)
3009 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
3010 clipniter: maximum number of iteration of 'clipthresh'-sigma
3011 clipping (default is 0)
3012 edge: an optional number of channel to drop at
3013 the edge of spectrum. If only one value is
3014 specified, the same number will be dropped
3015 from both sides of the spectrum. Default
3016 is to keep all channels. Nested tuples
3017 represent individual edge selection for
3018 different IFs (a number of spectral channels
3019 can be different)
3020 threshold: the threshold used by line finder. It is
3021 better to keep it large as only strong lines
3022 affect the baseline solution.
3023 chan_avg_limit: a maximum number of consequtive spectral
3024 channels to average during the search of
3025 weak and broad lines. The default is no
3026 averaging (and no search for weak lines).
3027 If such lines can affect the fitted baseline
3028 (e.g. a high order polynomial is fitted),
3029 increase this parameter (usually values up
3030 to 8 are reasonable). Most users of this
3031 method should find the default value sufficient.
3032 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
3033 plot the fit and the residual. In this each
3034 indivual fit has to be approved, by typing 'y'
3035 or 'n'
3036 getresidual: if False, returns best-fit values instead of
3037 residual. (default is True)
3038 showprogress: show progress status for large data.
3039 default is True.
3040 minnrow: minimum number of input spectra to show.
3041 default is 1000.
3042 outlog: Output the coefficients of the best-fit
3043 function to logger (default is False)
3044 blfile: Name of a text file in which the best-fit
3045 parameter values to be written
3046 (default is "": no file/logger output)
3047 csvformat: if True blfile is csv-formatted, default is False.
3048
3049 Example:
3050 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
3051
3052 Note:
3053 The best-fit parameter values output in logger and/or blfile are now
3054 based on specunit of 'channel'.
3055 """
3056
3057 try:
3058 varlist = vars()
3059
3060 if insitu is None: insitu = rcParams['insitu']
3061 if insitu:
3062 workscan = self
3063 else:
3064 workscan = self.copy()
3065
3066 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
3067 if mask is None: mask = []
3068 if npiece is None: npiece = 2
3069 if clipthresh is None: clipthresh = 3.0
3070 if clipniter is None: clipniter = 0
3071 if edge is None: edge = (0, 0)
3072 if threshold is None: threshold = 3
3073 if chan_avg_limit is None: chan_avg_limit = 1
3074 if plot is None: plot = False
3075 if getresidual is None: getresidual = True
3076 if showprogress is None: showprogress = True
3077 if minnrow is None: minnrow = 1000
3078 if outlog is None: outlog = False
3079 if blfile is None: blfile = ''
3080 if csvformat is None: csvformat = False
3081
3082 if csvformat:
3083 scsvformat = "T"
3084 else:
3085 scsvformat = "F"
3086
3087 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3088 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
3089 clipniter,
3090 normalise_edge_param(edge),
3091 threshold,
3092 chan_avg_limit, getresidual,
3093 pack_progress_params(showprogress,
3094 minnrow),
3095 outlog, scsvformat+blfile)
3096 workscan._add_history("auto_cspline_baseline", varlist)
3097
3098 if insitu:
3099 self._assign(workscan)
3100 else:
3101 return workscan
3102
3103 except RuntimeError, e:
3104 raise_fitting_failure_exception(e)
3105
    @asaplog_post_dec
    def chebyshev_baseline(self, insitu=None, mask=None, order=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            insitu:       If False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         An optional mask
            order:        the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:    maximum number of iteration of 'clipthresh'-sigma
                          clipping (default is 0)
            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                          plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by Chebyshev polynomials of order up to 5,
            # with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5, clipthresh=3.0, clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed to the C++ layer as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order, clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile)
            workscan._add_history("chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3193
    @asaplog_post_dec
    def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the maximum order of Chebyshev polynomial (default is 5)
            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
            clipniter:      maximum number of iteration of 'clipthresh'-sigma
                            clipping (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                            plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed to the C++ layer as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order, clipthresh,
                                              clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3308
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu:       if False a new scantable is returned.
                          Otherwise, the scaling is done in-situ
                          The default is taken from .asaprc (False)
            mask:         an optional mask
            order:        the order of the polynomial (default is 0)
            plot:         plot the fit and the residual. In this each
                          individual fit has to be approved, by typing 'y'
                          or 'n'
            getresidual:  if False, returns best-fit values instead of
                          residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow:      minimum number of input spectra to show.
                          default is 1000.
            outlog:       Output the coefficients of the best-fit
                          function to logger (default is False)
            blfile:       Name of a text file in which the best-fit
                          parameter values to be written
                          (default is "": no file/logger output)
            csvformat:    if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""
            if csvformat is None: csvformat = False

            # csv flag is passed to the C++ layer as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            if plot:
                # NOTE(review): results are appended to blfile only when the
                # file already exists — confirm this is intentional.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                    os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: the initial "H" makes the first prompt
                # show the help text; "Y" means auto-accept remaining rows
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)
                    
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after the first row, prompt without forcing help
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)
                    
                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)
            
            workscan._add_history("poly_baseline", varlist)
            
            if insitu:
                self._assign(workscan)
            else:
                return workscan
            
        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3446
    @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu:         if False a new scantable is returned.
                            Otherwise, the scaling is done in-situ
                            The default is taken from .asaprc (False)
            mask:           an optional mask retrieved from scantable
            order:          the order of the polynomial (default is 0)
            edge:           an optional number of channel to drop at
                            the edge of spectrum. If only one value is
                            specified, the same number will be dropped
                            from both sides of the spectrum. Default
                            is to keep all channels. Nested tuples
                            represent individual edge selection for
                            different IFs (a number of spectral channels
                            can be different)
            threshold:      the threshold used by line finder. It is
                            better to keep it large as only strong lines
                            affect the baseline solution.
            chan_avg_limit: a maximum number of consecutive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot:           plot the fit and the residual. In this each
                            individual fit has to be approved, by typing 'y'
                            or 'n'
            getresidual:    if False, returns best-fit values instead of
                            residual. (default is True)
            showprogress:   show progress status for large data.
                            default is True.
            minnrow:        minimum number of input spectra to show.
                            default is 1000.
            outlog:         Output the coefficients of the best-fit
                            function to logger (default is False)
            blfile:         Name of a text file in which the best-fit
                            parameter values to be written
                            (default is "": no file/logger output)
            csvformat:      if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # csv flag is passed to the C++ layer as a "T"/"F" prefix on blfile
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # flatten to one (start, end) pair per IF
            edge = normalise_edge_param(edge)

            if plot:
                # NOTE(review): results are appended to blfile only when the
                # file already exists — confirm this is intentional.
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # verification state: the initial "H" makes the first prompt
                # show the help text; "Y" means auto-accept remaining rows
                action = "H"
                for r in rows:
                    # the flattened edge list holds two entries per IF
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    # fit only the line-free channels found by the linefinder
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        # after the first row, prompt without forcing help
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    # store either the residual or the fitted baseline itself
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive path: delegate the whole loop to C++
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3616
3617 def _init_blinfo(self):
3618 """\
3619 Initialise the following three auxiliary members:
3620 blpars : parameters of the best-fit baseline,
3621 masklists : mask data (edge positions of masked channels) and
3622 actualmask : mask data (in boolean list),
3623 to keep for use later (including output to logger/text files).
3624 Used by poly_baseline() and auto_poly_baseline() in case of
3625 'plot=True'.
3626 """
3627 self.blpars = []
3628 self.masklists = []
3629 self.actualmask = []
3630 return
3631
3632 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3633 """\
3634 Append baseline-fitting related info to blpars, masklist and
3635 actualmask.
3636 """
3637 self.blpars.append(data_blpars)
3638 self.masklists.append(data_masklists)
3639 self.actualmask.append(data_actualmask)
3640 return
3641
3642 @asaplog_post_dec
3643 def rotate_linpolphase(self, angle):
3644 """\
3645 Rotate the phase of the complex polarization O=Q+iU correlation.
3646 This is always done in situ in the raw data. So if you call this
3647 function more than once then each call rotates the phase further.
3648
3649 Parameters:
3650
3651 angle: The angle (degrees) to rotate (add) by.
3652
3653 Example::
3654
3655 scan.rotate_linpolphase(2.3)
3656
3657 """
3658 varlist = vars()
3659 self._math._rotate_linpolphase(self, angle)
3660 self._add_history("rotate_linpolphase", varlist)
3661 return
3662
3663 @asaplog_post_dec
3664 def rotate_xyphase(self, angle):
3665 """\
3666 Rotate the phase of the XY correlation. This is always done in situ
3667 in the data. So if you call this function more than once
3668 then each call rotates the phase further.
3669
3670 Parameters:
3671
3672 angle: The angle (degrees) to rotate (add) by.
3673
3674 Example::
3675
3676 scan.rotate_xyphase(2.3)
3677
3678 """
3679 varlist = vars()
3680 self._math._rotate_xyphase(self, angle)
3681 self._add_history("rotate_xyphase", varlist)
3682 return
3683
3684 @asaplog_post_dec
3685 def swap_linears(self):
3686 """\
3687 Swap the linear polarisations XX and YY, or better the first two
3688 polarisations as this also works for ciculars.
3689 """
3690 varlist = vars()
3691 self._math._swap_linears(self)
3692 self._add_history("swap_linears", varlist)
3693 return
3694
3695 @asaplog_post_dec
3696 def invert_phase(self):
3697 """\
3698 Invert the phase of the complex polarisation
3699 """
3700 varlist = vars()
3701 self._math._invert_phase(self)
3702 self._add_history("invert_phase", varlist)
3703 return
3704
3705 @asaplog_post_dec
3706 def add(self, offset, insitu=None):
3707 """\
3708 Return a scan where all spectra have the offset added
3709
3710 Parameters:
3711
3712 offset: the offset
3713
3714 insitu: if False a new scantable is returned.
3715 Otherwise, the scaling is done in-situ
3716 The default is taken from .asaprc (False)
3717
3718 """
3719 if insitu is None: insitu = rcParams['insitu']
3720 self._math._setinsitu(insitu)
3721 varlist = vars()
3722 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3723 s._add_history("add", varlist)
3724 if insitu:
3725 self._assign(s)
3726 else:
3727 return s
3728
3729 @asaplog_post_dec
3730 def scale(self, factor, tsys=True, insitu=None):
3731 """\
3732
3733 Return a scan where all spectra are scaled by the given 'factor'
3734
3735 Parameters:
3736
3737 factor: the scaling factor (float or 1D float list)
3738
3739 insitu: if False a new scantable is returned.
3740 Otherwise, the scaling is done in-situ
3741 The default is taken from .asaprc (False)
3742
3743 tsys: if True (default) then apply the operation to Tsys
3744 as well as the data
3745
3746 """
3747 if insitu is None: insitu = rcParams['insitu']
3748 self._math._setinsitu(insitu)
3749 varlist = vars()
3750 s = None
3751 import numpy
3752 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3753 if isinstance(factor[0], list) or isinstance(factor[0],
3754 numpy.ndarray):
3755 from asapmath import _array2dOp
3756 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3757 else:
3758 s = scantable( self._math._arrayop( self, factor,
3759 "MUL", tsys ) )
3760 else:
3761 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3762 s._add_history("scale", varlist)
3763 if insitu:
3764 self._assign(s)
3765 else:
3766 return s
3767
3768 @preserve_selection
3769 def set_sourcetype(self, match, matchtype="pattern",
3770 sourcetype="reference"):
3771 """\
3772 Set the type of the source to be an source or reference scan
3773 using the provided pattern.
3774
3775 Parameters:
3776
3777 match: a Unix style pattern, regular expression or selector
3778
3779 matchtype: 'pattern' (default) UNIX style pattern or
3780 'regex' regular expression
3781
3782 sourcetype: the type of the source to use (source/reference)
3783
3784 """
3785 varlist = vars()
3786 stype = -1
3787 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
3788 stype = 1
3789 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
3790 stype = 0
3791 else:
3792 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
3793 if matchtype.lower().startswith("p"):
3794 matchtype = "pattern"
3795 elif matchtype.lower().startswith("r"):
3796 matchtype = "regex"
3797 else:
3798 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3799 sel = selector()
3800 if isinstance(match, selector):
3801 sel = match
3802 else:
3803 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3804 self.set_selection(sel)
3805 self._setsourcetype(stype)
3806 self._add_history("set_sourcetype", varlist)
3807
3808 @asaplog_post_dec
3809 @preserve_selection
3810 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3811 """\
3812 This function allows to build quotients automatically.
3813 It assumes the observation to have the same number of
3814 "ons" and "offs"
3815
3816 Parameters:
3817
3818 preserve: you can preserve (default) the continuum or
3819 remove it. The equations used are
3820
3821 preserve: Output = Toff * (on/off) - Toff
3822
3823 remove: Output = Toff * (on/off) - Ton
3824
3825 mode: the on/off detection mode
3826 'paired' (default)
3827 identifies 'off' scans by the
3828 trailing '_R' (Mopra/Parkes) or
3829 '_e'/'_w' (Tid) and matches
3830 on/off pairs from the observing pattern
3831 'time'
3832 finds the closest off in time
3833
3834 .. todo:: verify argument is not implemented
3835
3836 """
3837 varlist = vars()
3838 modes = ["time", "paired"]
3839 if not mode in modes:
3840 msg = "please provide valid mode. Valid modes are %s" % (modes)
3841 raise ValueError(msg)
3842 s = None
3843 if mode.lower() == "paired":
3844 sel = self.get_selection()
3845 sel.set_query("SRCTYPE==psoff")
3846 self.set_selection(sel)
3847 offs = self.copy()
3848 sel.set_query("SRCTYPE==pson")
3849 self.set_selection(sel)
3850 ons = self.copy()
3851 s = scantable(self._math._quotient(ons, offs, preserve))
3852 elif mode.lower() == "time":
3853 s = scantable(self._math._auto_quotient(self, mode, preserve))
3854 s._add_history("auto_quotient", varlist)
3855 return s
3856
3857 @asaplog_post_dec
3858 def mx_quotient(self, mask = None, weight='median', preserve=True):
3859 """\
3860 Form a quotient using "off" beams when observing in "MX" mode.
3861
3862 Parameters:
3863
3864 mask: an optional mask to be used when weight == 'stddev'
3865
3866 weight: How to average the off beams. Default is 'median'.
3867
3868 preserve: you can preserve (default) the continuum or
3869 remove it. The equations used are:
3870
3871 preserve: Output = Toff * (on/off) - Toff
3872
3873 remove: Output = Toff * (on/off) - Ton
3874
3875 """
3876 mask = mask or ()
3877 varlist = vars()
3878 on = scantable(self._math._mx_extract(self, 'on'))
3879 preoff = scantable(self._math._mx_extract(self, 'off'))
3880 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3881 from asapmath import quotient
3882 q = quotient(on, off, preserve)
3883 q._add_history("mx_quotient", varlist)
3884 return q
3885
3886 @asaplog_post_dec
3887 def freq_switch(self, insitu=None):
3888 """\
3889 Apply frequency switching to the data.
3890
3891 Parameters:
3892
3893 insitu: if False a new scantable is returned.
3894 Otherwise, the swictching is done in-situ
3895 The default is taken from .asaprc (False)
3896
3897 """
3898 if insitu is None: insitu = rcParams['insitu']
3899 self._math._setinsitu(insitu)
3900 varlist = vars()
3901 s = scantable(self._math._freqswitch(self))
3902 s._add_history("freq_switch", varlist)
3903 if insitu:
3904 self._assign(s)
3905 else:
3906 return s
3907
3908 @asaplog_post_dec
3909 def recalc_azel(self):
3910 """Recalculate the azimuth and elevation for each position."""
3911 varlist = vars()
3912 self._recalcazel()
3913 self._add_history("recalc_azel", varlist)
3914 return
3915
3916 @asaplog_post_dec
3917 def __add__(self, other):
3918 """
3919 implicit on all axes and on Tsys
3920 """
3921 varlist = vars()
3922 s = self.__op( other, "ADD" )
3923 s._add_history("operator +", varlist)
3924 return s
3925
3926 @asaplog_post_dec
3927 def __sub__(self, other):
3928 """
3929 implicit on all axes and on Tsys
3930 """
3931 varlist = vars()
3932 s = self.__op( other, "SUB" )
3933 s._add_history("operator -", varlist)
3934 return s
3935
3936 @asaplog_post_dec
3937 def __mul__(self, other):
3938 """
3939 implicit on all axes and on Tsys
3940 """
3941 varlist = vars()
3942 s = self.__op( other, "MUL" ) ;
3943 s._add_history("operator *", varlist)
3944 return s
3945
3946
3947 @asaplog_post_dec
3948 def __div__(self, other):
3949 """
3950 implicit on all axes and on Tsys
3951 """
3952 varlist = vars()
3953 s = self.__op( other, "DIV" )
3954 s._add_history("operator /", varlist)
3955 return s
3956
3957 @asaplog_post_dec
3958 def __op( self, other, mode ):
3959 s = None
3960 if isinstance(other, scantable):
3961 s = scantable(self._math._binaryop(self, other, mode))
3962 elif isinstance(other, float):
3963 if other == 0.0:
3964 raise ZeroDivisionError("Dividing by zero is not recommended")
3965 s = scantable(self._math._unaryop(self, other, mode, False))
3966 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3967 if isinstance(other[0], list) \
3968 or isinstance(other[0], numpy.ndarray):
3969 from asapmath import _array2dOp
3970 s = _array2dOp( self, other, mode, False )
3971 else:
3972 s = scantable( self._math._arrayop( self, other,
3973 mode, False ) )
3974 else:
3975 raise TypeError("Other input is not a scantable or float value")
3976 return s
3977
3978 @asaplog_post_dec
3979 def get_fit(self, row=0):
3980 """\
3981 Print or return the stored fits for a row in the scantable
3982
3983 Parameters:
3984
3985 row: the row which the fit has been applied to.
3986
3987 """
3988 if row > self.nrow():
3989 return
3990 from asap.asapfit import asapfit
3991 fit = asapfit(self._getfit(row))
3992 asaplog.push( '%s' %(fit) )
3993 return fit.as_dict()
3994
3995 @preserve_selection
3996 def flag_nans(self):
3997 """\
3998 Utility function to flag NaN values in the scantable.
3999 """
4000 import numpy
4001 basesel = self.get_selection()
4002 for i in range(self.nrow()):
4003 sel = self.get_row_selector(i)
4004 self.set_selection(basesel+sel)
4005 nans = numpy.isnan(self._getspectrum(0))
4006 if numpy.any(nans):
4007 bnans = [ bool(v) for v in nans]
4008 self.flag(bnans)
4009
4010 def get_row_selector(self, rowno):
4011 return selector(rows=[rowno])
4012
    def _add_history(self, funcname, parameters):
        """\
        Append a '##'-separated history record (timestamp, function name,
        argument values) to the scantable, unless history keeping is
        disabled via rcParams['scantable.history'].
        """
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' would only add noise to the record
        if parameters.has_key('self'):
            del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # dict-valued argument: record one key=value entry per item
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        # don't serialise whole tables into the history
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # store channel masks compactly as [start, stop) pairs
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
4053
4054
4055 def _zip_mask(self, mask):
4056 mask = list(mask)
4057 i = 0
4058 segments = []
4059 while mask[i:].count(1):
4060 i += mask[i:].index(1)
4061 if mask[i:].count(0):
4062 j = i + mask[i:].index(0)
4063 else:
4064 j = len(mask)
4065 segments.append([i, j])
4066 i = j
4067 return segments
4068
4069 def _get_ordinate_label(self):
4070 fu = "("+self.get_fluxunit()+")"
4071 import re
4072 lbl = "Intensity"
4073 if re.match(".K.", fu):
4074 lbl = "Brightness Temperature "+ fu
4075 elif re.match(".Jy.", fu):
4076 lbl = "Flux density "+ fu
4077 return lbl
4078
4079 def _check_ifs(self):
4080# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4081 nchans = [self.nchan(i) for i in self.getifnos()]
4082 nchans = filter(lambda t: t > 0, nchans)
4083 return (sum(nchans)/len(nchans) == nchans[0])
4084
    @asaplog_post_dec
    def _fill(self, names, unit, average, opts={}):
        """\
        Import one or more data files into this scantable (worker used by
        the reading constructors).

        Parameters:
            names:   list of file/directory names to import
            unit:    flux unit to set after filling, or None to leave as-is
            average: if True, average the integrations of each file
                     ('NONE' weighting, per SCAN) while filling
            opts:    extra options passed to the filler's open()
                     (NOTE(review): mutable default argument — safe only
                     as long as no callee mutates it; confirm)

        Raises:
            IOError: if any of the given paths does not exist.
        """
        first = True
        fullnames = []
        # validate all paths up front so nothing is imported on failure
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 1 -> disk-based storage, 0 -> memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            # pick the filler matching the on-disk format
            if is_ms( name ):
                r = msfiller( tbl )
            else:
                r = filler( tbl )
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name, opts)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            # merge every file after the first into the accumulated table
            if not first:
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
        #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
4125
4126 def _get_verify_action( self, msg, action=None ):
4127 valid_act = ['Y', 'N', 'A', 'R']
4128 if not action or not isinstance(action, str):
4129 action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
4130 if action == '':
4131 return "Y"
4132 elif (action.upper()[0] in valid_act):
4133 return action.upper()[0]
4134 elif (action.upper()[0] in ['H','?']):
4135 print "Available actions of verification [Y|n|a|r]"
4136 print " Y : Yes for current data (default)"
4137 print " N : No for current data"
4138 print " A : Accept all in the following and exit from verification"
4139 print " R : Reject all in the following and exit from verification"
4140 print " H or ?: help (show this message)"
4141 return self._get_verify_action(msg)
4142 else:
4143 return 'Y'
4144
4145 def __getitem__(self, key):
4146 if key < 0:
4147 key += self.nrow()
4148 if key >= self.nrow():
4149 raise IndexError("Row index out of range.")
4150 return self._getspectrum(key)
4151
4152 def __setitem__(self, key, value):
4153 if key < 0:
4154 key += self.nrow()
4155 if key >= self.nrow():
4156 raise IndexError("Row index out of range.")
4157 if not hasattr(value, "__len__") or \
4158 len(value) > self.nchan(self.getif(key)):
4159 raise ValueError("Spectrum length doesn't match.")
4160 return self._setspectrum(value, key)
4161
4162 def __len__(self):
4163 return self.nrow()
4164
4165 def __iter__(self):
4166 for i in range(len(self)):
4167 yield self[i]
Note: See TracBrowser for help on using the repository browser.