source: trunk/python/scantable.py@ 2701

Last change on this file since 2701 was 2687, checked in by Takeshi Nakazato, 12 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: from asap.scantable import is_scantable

is_scantable('<any CASA image>')
# if it returns False, the bug is fixed

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Fixed a bug in is_scantable() that the function recognizes
CASA image as Scantable.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 158.0 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4import tempfile
5import numpy
6try:
7 from functools import wraps as wraps_dec
8except ImportError:
9 from asap.compatibility import wraps as wraps_dec
10
11from asap.env import is_casapy
12from asap._asap import Scantable
13from asap._asap import filler, msfiller
14from asap.parameters import rcParams
15from asap.logging import asaplog, asaplog_post_dec
16from asap.selector import selector
17from asap.linecatalog import linecatalog
18from asap.coordinate import coordinate
19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
20from asap.asapfitter import fitter
21
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs."""
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always restore, even if func raised
            obj.set_selection(saved)
        return result
    return wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename:    the name of the file/directory to test

    A scantable is a casacore table directory whose table.info type line
    either mentions 'Scantable' explicitly or names neither
    'Measurement Set' nor 'Image' (older Scantables lack the explicit
    type label).
    """
    # must look like a casacore table directory
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    # the first line of table.info holds the table type
    f = open(filename + '/table.info')
    try:
        l = f.readline()
    finally:
        # guarantee the handle is closed even if readline fails
        f.close()
    if l.find('Scantable') != -1:
        return True
    # anything not explicitly an MS or a CASA image is assumed to be a
    # (legacy) Scantable
    return l.find('Measurement Set') == -1 and l.find('Image') == -1
59
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename:    the name of the file/directory to test

    """
    # a MeasurementSet is a casacore table directory whose table.info
    # type line mentions 'Measurement Set'
    looks_like_table = (os.path.isdir(filename)
                        and os.path.exists(filename + '/table.info')
                        and os.path.exists(filename + '/table.dat'))
    if not looks_like_table:
        return False
    f = open(filename + '/table.info')
    first_line = f.readline()
    f.close()
    return first_line.find('Measurement Set') != -1
80
def normalise_edge_param(edge):
    """\
    Convert a given edge value to a one-dimensional array that can be
    given to baseline-fitting/subtraction functions.
    The length of the output value will be an even because values for
    the both sides of spectra are to be contained for each IF. When
    the length is 2, the values will be applied to all IFs. If the length
    is larger than 2, it will be 2*ifnos().
    Accepted format of edge include:
            * an integer - will be used for both sides of spectra of all IFs.
                  e.g. 10 is converted to [10,10]
            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
            * a list/tuple containing an integer - same as the above case.
                  e.g. [10] is converted to [10,10]
            * a list/tuple containing two integers - will be used for all IFs.
                  e.g. [5,10] is output as it is. no need to convert.
            * a list/tuple of lists/tuples containing TWO integers -
                  each element of edge will be used for each IF.
                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].

                  If an element contains the same integer values, the input 'edge'
                  parameter can be given in a simpler shape in the following cases:
                      ** when len(edge)!=2
                          any elements containing the same values can be replaced
                          to single integers.
                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
                      ** when len(edge)=2
                          care is needed for this case: ONLY ONE of the
                          elements can be a single integer,
                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
                               or [[5,5],10], but can NOT be simplified to [5,10].
                               when [5,10] given, it is interpreted as
                               [[5,10],[5,10],[5,10],...] instead, as shown before.
    """
    from asap import _is_sequence_or_number as _is_valid
    # ---- validation: every element must be an int or a pair of ints ----
    if isinstance(edge, list) or isinstance(edge, tuple):
        for edgepar in edge:
            if not _is_valid(edgepar, int):
                raise ValueError, "Each element of the 'edge' tuple has \
                                   to be a pair of integers or an integer."
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                if len(edgepar) != 2:
                    raise ValueError, "Each element of the 'edge' tuple has \
                                       to be a pair of integers or an integer."
    else:
        if not _is_valid(edge, int):
            raise ValueError, "Parameter 'edge' has to be an integer or a \
                               pair of integers specified as a tuple. \
                               Nested tuples are allowed \
                               to make individual selection for different IFs."

    # ---- canonicalise the scalar / short-list shorthands ----
    if isinstance(edge, int):
        edge = [ edge, edge ]                   # e.g. 3 => [3,3]
    elif isinstance(edge, list) or isinstance(edge, tuple):
        if len(edge) == 0:
            edge = [0, 0]                       # e.g. [] => [0,0]
        elif len(edge) == 1:
            if isinstance(edge[0], int):
                edge = [ edge[0], edge[0] ]     # e.g. [1] => [1,1]

    # 'commonedge' means one [left,right] pair applied to every IF;
    # it holds only when edge is flat (no nested pairs) and short.
    commonedge = True
    if len(edge) > 2: commonedge = False
    else:
        for edgepar in edge:
            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
                commonedge = False
                break

    if commonedge:
        if len(edge) > 1:
            norm_edge = edge
        else:
            norm_edge = edge + edge
    else:
        # per-IF values: flatten, expanding single ints to [n,n] pairs
        norm_edge = []
        for edgepar in edge:
            if isinstance(edgepar, int):
                norm_edge += [edgepar, edgepar]
            else:
                norm_edge += edgepar

    return norm_edge
165
def raise_fitting_failure_exception(e):
    """Log a fit failure (verbose mode) or re-raise it as a RuntimeError."""
    msg = "The fit failed, possibly because it didn't converge."
    if not rcParams["verbose"]:
        raise RuntimeError(str(e) + '\n' + msg)
    asaplog.push(str(e))
    asaplog.push(str(msg))
173
def pack_progress_params(showprogress, minnrow):
    """Encode progress-display options as a 'true/false,<minnrow>' string."""
    return "%s,%s" % (str(showprogress).lower(), minnrow)
176
177class scantable(Scantable):
178 """\
179 The ASAP container for scans (single-dish data).
180 """
181
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None,
                 **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string
                          (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default (false) is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments, recorded in the history
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # copy constructor: reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # open an existing on-disk/in-memory scantable
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    if average:
                        self._assign( self.average_time( scanav=True ) )
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                elif is_ms(filename):
                    # Measurement Set: forward only the MS-specific
                    # keyword arguments to the msfiller
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # plain file: assume rpfits/sdfits, let the filler decide
                    self._fill([filename], unit, average)
                    # only apply to new data not "copy constructor"
                    self.parallactify(parallactify)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                     and isinstance(filename[-1], str):
                # a list/tuple of file names to fill from
                self._fill(filename, unit, average)
                self.parallactify(parallactify)
        self._add_history("scantable", varlist)
258
259 @asaplog_post_dec
260 def save(self, name=None, format=None, overwrite=False):
261 """\
262 Store the scantable on disk. This can be an asap (aips++) Table,
263 SDFITS or MS2 format.
264
265 Parameters:
266
267 name: the name of the outputfile. For format 'ASCII'
268 this is the root file name (data in 'name'.txt
269 and header in 'name'_header.txt)
270
271 format: an optional file format. Default is ASAP.
272 Allowed are:
273
274 * 'ASAP' (save as ASAP [aips++] Table),
275 * 'SDFITS' (save as SDFITS file)
276 * 'ASCII' (saves as ascii text file)
277 * 'MS2' (saves as an casacore MeasurementSet V2)
278 * 'FITS' (save as image FITS - not readable by
279 class)
280 * 'CLASS' (save as FITS readable by CLASS)
281
282 overwrite: If the file should be overwritten if it exists.
283 The default False is to return with warning
284 without writing the output. USE WITH CARE.
285
286 Example::
287
288 scan.save('myscan.asap')
289 scan.save('myscan.sdfits', 'SDFITS')
290
291 """
292 from os import path
293 format = format or rcParams['scantable.save']
294 suffix = '.'+format.lower()
295 if name is None or name == "":
296 name = 'scantable'+suffix
297 msg = "No filename given. Using default name %s..." % name
298 asaplog.push(msg)
299 name = path.expandvars(name)
300 if path.isfile(name) or path.isdir(name):
301 if not overwrite:
302 msg = "File %s exists." % name
303 raise IOError(msg)
304 format2 = format.upper()
305 if format2 == 'ASAP':
306 self._save(name)
307 elif format2 == 'MS2':
308 msopt = {'ms': {'overwrite': overwrite } }
309 from asap._asap import mswriter
310 writer = mswriter( self )
311 writer.write( name, msopt )
312 else:
313 from asap._asap import stwriter as stw
314 writer = stw(format2)
315 writer.write(self, name)
316 return
317
318 def copy(self):
319 """Return a copy of this scantable.
320
321 *Note*:
322
323 This makes a full (deep) copy. scan2 = scan1 makes a reference.
324
325 Example::
326
327 copiedscan = scan.copy()
328
329 """
330 sd = scantable(Scantable._copy(self))
331 return sd
332
333 def drop_scan(self, scanid=None):
334 """\
335 Return a new scantable where the specified scan number(s) has(have)
336 been dropped.
337
338 Parameters:
339
340 scanid: a (list of) scan number(s)
341
342 """
343 from asap import _is_sequence_or_number as _is_valid
344 from asap import _to_list
345 from asap import unique
346 if not _is_valid(scanid):
347 raise RuntimeError( 'Please specify a scanno to drop from the'
348 ' scantable' )
349 scanid = _to_list(scanid)
350 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
351 for sid in scanid: allscans.remove(sid)
352 if len(allscans) == 0:
353 raise ValueError("Can't remove all scans")
354 sel = selector(scans=allscans)
355 return self._select_copy(sel)
356
357 def _select_copy(self, selection):
358 orig = self.get_selection()
359 self.set_selection(orig+selection)
360 cp = self.copy()
361 self.set_selection(orig)
362 return cp
363
364 def get_scan(self, scanid=None):
365 """\
366 Return a specific scan (by scanno) or collection of scans (by
367 source name) in a new scantable.
368
369 *Note*:
370
371 See scantable.drop_scan() for the inverse operation.
372
373 Parameters:
374
375 scanid: a (list of) scanno or a source name, unix-style
376 patterns are accepted for source name matching, e.g.
377 '*_R' gets all 'ref scans
378
379 Example::
380
381 # get all scans containing the source '323p459'
382 newscan = scan.get_scan('323p459')
383 # get all 'off' scans
384 refscans = scan.get_scan('*_R')
385 # get a susbset of scans by scanno (as listed in scan.summary())
386 newscan = scan.get_scan([0, 2, 7, 10])
387
388 """
389 if scanid is None:
390 raise RuntimeError( 'Please specify a scan no or name to '
391 'retrieve from the scantable' )
392 try:
393 bsel = self.get_selection()
394 sel = selector()
395 if type(scanid) is str:
396 sel.set_name(scanid)
397 return self._select_copy(sel)
398 elif type(scanid) is int:
399 sel.set_scans([scanid])
400 return self._select_copy(sel)
401 elif type(scanid) is list:
402 sel.set_scans(scanid)
403 return self._select_copy(sel)
404 else:
405 msg = "Illegal scanid type, use 'int' or 'list' if ints."
406 raise TypeError(msg)
407 except RuntimeError:
408 raise
409
410 def __str__(self):
411 tempFile = tempfile.NamedTemporaryFile()
412 Scantable._summary(self, tempFile.name)
413 tempFile.seek(0)
414 asaplog.clear()
415 return tempFile.file.read()
416
417 @asaplog_post_dec
418 def summary(self, filename=None):
419 """\
420 Print a summary of the contents of this scantable.
421
422 Parameters:
423
424 filename: the name of a file to write the putput to
425 Default - no file output
426
427 """
428 if filename is not None:
429 if filename is "":
430 filename = 'scantable_summary.txt'
431 from os.path import expandvars, isdir
432 filename = expandvars(filename)
433 if isdir(filename):
434 msg = "Illegal file name '%s'." % (filename)
435 raise IOError(msg)
436 else:
437 filename = ""
438 Scantable._summary(self, filename)
439
440 def get_spectrum(self, rowno):
441 """Return the spectrum for the current row in the scantable as a list.
442
443 Parameters:
444
445 rowno: the row number to retrieve the spectrum from
446
447 """
448 return self._getspectrum(rowno)
449
450 def get_mask(self, rowno):
451 """Return the mask for the current row in the scantable as a list.
452
453 Parameters:
454
455 rowno: the row number to retrieve the mask from
456
457 """
458 return self._getmask(rowno)
459
460 def set_spectrum(self, spec, rowno):
461 """Set the spectrum for the current row in the scantable.
462
463 Parameters:
464
465 spec: the new spectrum
466
467 rowno: the row number to set the spectrum for
468
469 """
470 assert(len(spec) == self.nchan(self.getif(rowno)))
471 return self._setspectrum(spec, rowno)
472
473 def get_coordinate(self, rowno):
474 """Return the (spectral) coordinate for a a given 'rowno'.
475
476 *Note*:
477
478 * This coordinate is only valid until a scantable method modifies
479 the frequency axis.
480 * This coordinate does contain the original frequency set-up
481 NOT the new frame. The conversions however are done using the user
482 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
483 use scantable.freq_align first. Without it there is no closure,
484 i.e.::
485
486 c = myscan.get_coordinate(0)
487 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
488
489 Parameters:
490
491 rowno: the row number for the spectral coordinate
492
493 """
494 return coordinate(Scantable.get_coordinate(self, rowno))
495
496 def get_selection(self):
497 """\
498 Get the selection object currently set on this scantable.
499
500 Example::
501
502 sel = scan.get_selection()
503 sel.set_ifs(0) # select IF 0
504 scan.set_selection(sel) # apply modified selection
505
506 """
507 return selector(self._getselection())
508
509 def set_selection(self, selection=None, **kw):
510 """\
511 Select a subset of the data. All following operations on this scantable
512 are only applied to thi selection.
513
514 Parameters:
515
516 selection: a selector object (default unset the selection), or
517 any combination of 'pols', 'ifs', 'beams', 'scans',
518 'cycles', 'name', 'query'
519
520 Examples::
521
522 sel = selector() # create a selection object
523 self.set_scans([0, 3]) # select SCANNO 0 and 3
524 scan.set_selection(sel) # set the selection
525 scan.summary() # will only print summary of scanno 0 an 3
526 scan.set_selection() # unset the selection
527 # or the equivalent
528 scan.set_selection(scans=[0,3])
529 scan.summary() # will only print summary of scanno 0 an 3
530 scan.set_selection() # unset the selection
531
532 """
533 if selection is None:
534 # reset
535 if len(kw) == 0:
536 selection = selector()
537 else:
538 # try keywords
539 for k in kw:
540 if k not in selector.fields:
541 raise KeyError("Invalid selection key '%s', "
542 "valid keys are %s" % (k,
543 selector.fields))
544 selection = selector(**kw)
545 self._setselection(selection)
546
547 def get_row(self, row=0, insitu=None):
548 """\
549 Select a row in the scantable.
550 Return a scantable with single row.
551
552 Parameters:
553
554 row: row no of integration, default is 0.
555 insitu: if False a new scantable is returned. Otherwise, the
556 scaling is done in-situ. The default is taken from .asaprc
557 (False)
558
559 """
560 if insitu is None:
561 insitu = rcParams['insitu']
562 if not insitu:
563 workscan = self.copy()
564 else:
565 workscan = self
566 # Select a row
567 sel = selector()
568 sel.set_rows([row])
569 workscan.set_selection(sel)
570 if not workscan.nrow() == 1:
571 msg = "Could not identify single row. %d rows selected." \
572 % (workscan.nrow())
573 raise RuntimeError(msg)
574 if insitu:
575 self._assign(workscan)
576 else:
577 return workscan
578
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '_abc' variants return the abcissa value at the extremum
        # rather than the statistic itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            # remember the channel of the extremum for each row
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
        statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        # build a human-readable report with one entry per row
        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # for '_abc' the abcissa value IS the result
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
            out +=  sep+"\n"

        # route the report through a scratch file so the log gets the
        # fully formatted text in one push
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # NOTE(review): fixed, predictable path under /tmp — consider the
        # tempfile module for safety; kept as-is here.
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
686
687 def chan2data(self, rowno=0, chan=0):
688 """\
689 Returns channel/frequency/velocity and spectral value
690 at an arbitrary row and channel in the scantable.
691
692 Parameters:
693
694 rowno: a row number in the scantable. Default is the
695 first row, i.e. rowno=0
696
697 chan: a channel in the scantable. Default is the first
698 channel, i.e. pos=0
699
700 """
701 if isinstance(rowno, int) and isinstance(chan, int):
702 qx = {'unit': self.get_unit(),
703 'value': self._getabcissa(rowno)[chan]}
704 qy = {'unit': self.get_fluxunit(),
705 'value': self._getspectrum(rowno)[chan]}
706 return qx, qy
707
708 def stddev(self, mask=None):
709 """\
710 Determine the standard deviation of the current beam/if/pol
711 Takes a 'mask' as an optional parameter to specify which
712 channels should be excluded.
713
714 Parameters:
715
716 mask: an optional mask specifying where the standard
717 deviation should be determined.
718
719 Example::
720
721 scan.set_unit('channel')
722 msk = scan.create_mask([100, 200], [500, 600])
723 scan.stddev(mask=m)
724
725 """
726 return self.stats(stat='stddev', mask=mask);
727
728
729 def get_column_names(self):
730 """\
731 Return a list of column names, which can be used for selection.
732 """
733 return list(Scantable.get_column_names(self))
734
735 def get_tsys(self, row=-1):
736 """\
737 Return the System temperatures.
738
739 Parameters:
740
741 row: the rowno to get the information for. (default all rows)
742
743 Returns:
744
745 a list of Tsys values for the current selection
746
747 """
748 if row > -1:
749 return self._get_column(self._gettsys, row)
750 return self._row_callback(self._gettsys, "Tsys")
751
752 def get_tsysspectrum(self, row=-1):
753 """\
754 Return the channel dependent system temperatures.
755
756 Parameters:
757
758 row: the rowno to get the information for. (default all rows)
759
760 Returns:
761
762 a list of Tsys values for the current selection
763
764 """
765 return self._get_column( self._gettsysspectrum, row )
766
767 def get_weather(self, row=-1):
768 """\
769 Return the weather informations.
770
771 Parameters:
772
773 row: the rowno to get the information for. (default all rows)
774
775 Returns:
776
777 a dict or list of of dicts of values for the current selection
778
779 """
780
781 values = self._get_column(self._get_weather, row)
782 if row > -1:
783 return {'temperature': values[0],
784 'pressure': values[1], 'humidity' : values[2],
785 'windspeed' : values[3], 'windaz' : values[4]
786 }
787 else:
788 out = []
789 for r in values:
790
791 out.append({'temperature': r[0],
792 'pressure': r[1], 'humidity' : r[2],
793 'windspeed' : r[3], 'windaz' : r[4]
794 })
795 return out
796
    def _row_callback(self, callback, label):
        # Apply *callback* to every row, pushing a formatted per-row
        # report (labelled with *label*) to the log; returns the list of
        # callback results.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
820
821 def _get_column(self, callback, row=-1, *args):
822 """
823 """
824 if row == -1:
825 return [callback(i, *args) for i in range(self.nrow())]
826 else:
827 if 0 <= row < self.nrow():
828 return callback(row, *args)
829
830
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than
                          strings

            prec:         number of digits shown. Default -1 to automatic
                          calculation.
                          Note this number is equals to the digits of
                          MVTime, i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            prec = 7 - \
                numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            # strip the trailing fields (.%f, %S, %M) that the string
            # representation does not carry at this precision
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
876
877
878 def get_inttime(self, row=-1):
879 """\
880 Get a list of integration times for the observations.
881 Return a time in seconds for each integration in the scantable.
882
883 Parameters:
884
885 row: row no of integration. Default -1 return all rows.
886
887 """
888 return self._get_column(self._getinttime, row)
889
890
891 def get_sourcename(self, row=-1):
892 """\
893 Get a list source names for the observations.
894 Return a string for each integration in the scantable.
895 Parameters:
896
897 row: row no of integration. Default -1 return all rows.
898
899 """
900 return self._get_column(self._getsourcename, row)
901
902 def get_elevation(self, row=-1):
903 """\
904 Get a list of elevations for the observations.
905 Return a float for each integration in the scantable.
906
907 Parameters:
908
909 row: row no of integration. Default -1 return all rows.
910
911 """
912 return self._get_column(self._getelevation, row)
913
914 def get_azimuth(self, row=-1):
915 """\
916 Get a list of azimuths for the observations.
917 Return a float for each integration in the scantable.
918
919 Parameters:
920 row: row no of integration. Default -1 return all rows.
921
922 """
923 return self._get_column(self._getazimuth, row)
924
925 def get_parangle(self, row=-1):
926 """\
927 Get a list of parallactic angles for the observations.
928 Return a float for each integration in the scantable.
929
930 Parameters:
931
932 row: row no of integration. Default -1 return all rows.
933
934 """
935 return self._get_column(self._getparangle, row)
936
937 def get_direction(self, row=-1):
938 """
939 Get a list of Positions on the sky (direction) for the observations.
940 Return a string for each integration in the scantable.
941
942 Parameters:
943
944 row: row no of integration. Default -1 return all rows
945
946 """
947 return self._get_column(self._getdirection, row)
948
949 def get_directionval(self, row=-1):
950 """\
951 Get a list of Positions on the sky (direction) for the observations.
952 Return a float for each integration in the scantable.
953
954 Parameters:
955
956 row: row no of integration. Default -1 return all rows
957
958 """
959 return self._get_column(self._getdirectionvec, row)
960
961 @asaplog_post_dec
962 def set_unit(self, unit='channel'):
963 """\
964 Set the unit for all following operations on this scantable
965
966 Parameters:
967
968 unit: optional unit, default is 'channel'. Use one of '*Hz',
969 'km/s', 'channel' or equivalent ''
970
971 """
972 varlist = vars()
973 if unit in ['', 'pixel', 'channel']:
974 unit = ''
975 inf = list(self._getcoordinfo())
976 inf[0] = unit
977 self._setcoordinfo(inf)
978 self._add_history("set_unit", varlist)
979
980 @asaplog_post_dec
981 def set_instrument(self, instr):
982 """\
983 Set the instrument for subsequent processing.
984
985 Parameters:
986
987 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
988 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
989
990 """
991 self._setInstrument(instr)
992 self._add_history("set_instument", vars())
993
994 @asaplog_post_dec
995 def set_feedtype(self, feedtype):
996 """\
997 Overwrite the feed type, which might not be set correctly.
998
999 Parameters:
1000
1001 feedtype: 'linear' or 'circular'
1002
1003 """
1004 self._setfeedtype(feedtype)
1005 self._add_history("set_feedtype", vars())
1006
1007 @asaplog_post_dec
1008 def set_doppler(self, doppler='RADIO'):
1009 """\
1010 Set the doppler for all following operations on this scantable.
1011
1012 Parameters:
1013
1014 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1015
1016 """
1017 varlist = vars()
1018 inf = list(self._getcoordinfo())
1019 inf[2] = doppler
1020 self._setcoordinfo(inf)
1021 self._add_history("set_doppler", vars())
1022
1023 @asaplog_post_dec
1024 def set_freqframe(self, frame=None):
1025 """\
1026 Set the frame type of the Spectral Axis.
1027
1028 Parameters:
1029
1030 frame: an optional frame type, default 'LSRK'. Valid frames are:
1031 'TOPO', 'LSRD', 'LSRK', 'BARY',
1032 'GEO', 'GALACTO', 'LGROUP', 'CMB'
1033
1034 Example::
1035
1036 scan.set_freqframe('BARY')
1037
1038 """
1039 frame = frame or rcParams['scantable.freqframe']
1040 varlist = vars()
1041 # "REST" is not implemented in casacore
1042 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1043 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1044 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1045 'GEO', 'GALACTO', 'LGROUP', 'CMB']
1046
1047 if frame in valid:
1048 inf = list(self._getcoordinfo())
1049 inf[1] = frame
1050 self._setcoordinfo(inf)
1051 self._add_history("set_freqframe", varlist)
1052 else:
1053 msg = "Please specify a valid freq type. Valid types are:\n", valid
1054 raise TypeError(msg)
1055
1056 @asaplog_post_dec
1057 def set_dirframe(self, frame=""):
1058 """\
1059 Set the frame type of the Direction on the sky.
1060
1061 Parameters:
1062
1063 frame: an optional frame type, default ''. Valid frames are:
1064 'J2000', 'B1950', 'GALACTIC'
1065
1066 Example:
1067
1068 scan.set_dirframe('GALACTIC')
1069
1070 """
1071 varlist = vars()
1072 Scantable.set_dirframe(self, frame)
1073 self._add_history("set_dirframe", varlist)
1074
1075 def get_unit(self):
1076 """\
1077 Get the default unit set in this scantable
1078
1079 Returns:
1080
1081 A unit string
1082
1083 """
1084 inf = self._getcoordinfo()
1085 unit = inf[0]
1086 if unit == '': unit = 'channel'
1087 return unit
1088
1089 @asaplog_post_dec
1090 def get_abcissa(self, rowno=0):
1091 """\
1092 Get the abcissa in the current coordinate setup for the currently
1093 selected Beam/IF/Pol
1094
1095 Parameters:
1096
1097 rowno: an optional row number in the scantable. Default is the
1098 first row, i.e. rowno=0
1099
1100 Returns:
1101
1102 The abcissa values and the format string (as a dictionary)
1103
1104 """
1105 abc = self._getabcissa(rowno)
1106 lbl = self._getabcissalabel(rowno)
1107 return abc, lbl
1108
1109 @asaplog_post_dec
1110 def flag(self, mask=None, unflag=False, row=-1):
1111 """\
1112 Flag the selected data using an optional channel mask.
1113
1114 Parameters:
1115
1116 mask: an optional channel mask, created with create_mask. Default
1117 (no mask) is all channels.
1118
1119 unflag: if True, unflag the data
1120
1121 row: an optional row number in the scantable.
1122 Default -1 flags all rows
1123
1124 """
1125 varlist = vars()
1126 mask = mask or []
1127 self._flag(row, mask, unflag)
1128 self._add_history("flag", varlist)
1129
1130 @asaplog_post_dec
1131 def flag_row(self, rows=None, unflag=False):
1132 """\
1133 Flag the selected data in row-based manner.
1134
1135 Parameters:
1136
1137 rows: list of row numbers to be flagged. Default is no row
1138 (must be explicitly specified to execute row-based
1139 flagging).
1140
1141 unflag: if True, unflag the data.
1142
1143 """
1144 varlist = vars()
1145 if rows is None:
1146 rows = []
1147 self._flag_row(rows, unflag)
1148 self._add_history("flag_row", varlist)
1149
1150 @asaplog_post_dec
1151 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1152 """\
1153 Flag the selected data outside a specified range (in channel-base)
1154
1155 Parameters:
1156
1157 uthres: upper threshold.
1158
1159 dthres: lower threshold
1160
1161 clipoutside: True for flagging data outside the range
1162 [dthres:uthres].
1163 False for flagging data inside the range.
1164
1165 unflag: if True, unflag the data.
1166
1167 """
1168 varlist = vars()
1169 self._clip(uthres, dthres, clipoutside, unflag)
1170 self._add_history("clip", varlist)
1171
1172 @asaplog_post_dec
1173 def lag_flag(self, start, end, unit="MHz", insitu=None):
1174 """\
1175 Flag the data in 'lag' space by providing a frequency to remove.
1176 Flagged data in the scantable get interpolated over the region.
1177 No taper is applied.
1178
1179 Parameters:
1180
1181 start: the start frequency (really a period within the
1182 bandwidth) or period to remove
1183
1184 end: the end frequency or period to remove
1185
1186 unit: the frequency unit (default 'MHz') or '' for
1187 explicit lag channels
1188
1189 *Notes*:
1190
1191 It is recommended to flag edges of the band or strong
1192 signals beforehand.
1193
1194 """
1195 if insitu is None: insitu = rcParams['insitu']
1196 self._math._setinsitu(insitu)
1197 varlist = vars()
1198 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1199 if not (unit == "" or base.has_key(unit)):
1200 raise ValueError("%s is not a valid unit." % unit)
1201 if unit == "":
1202 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1203 else:
1204 s = scantable(self._math._lag_flag(self, start*base[unit],
1205 end*base[unit], "frequency"))
1206 s._add_history("lag_flag", varlist)
1207 if insitu:
1208 self._assign(s)
1209 else:
1210 return s
1211
1212 @asaplog_post_dec
1213 def fft(self, rowno=None, mask=None, getrealimag=False):
1214 """\
1215 Apply FFT to the spectra.
1216 Flagged data in the scantable get interpolated over the region.
1217
1218 Parameters:
1219
1220 rowno: The row number(s) to be processed. int, list
1221 and tuple are accepted. By default (None), FFT
1222 is applied to the whole data.
1223
1224 mask: Auxiliary channel mask(s). Given as a boolean
1225 list, it is applied to all specified rows.
1226 A list of boolean lists can also be used to
1227 apply different masks. In the latter case, the
1228 length of 'mask' must be the same as that of
1229 'rowno'. The default is None.
1230
1231 getrealimag: If True, returns the real and imaginary part
1232 values of the complex results.
1233 If False (the default), returns the amplitude
1234 (absolute value) normalised with Ndata/2 and
1235 phase (argument, in unit of radian).
1236
1237 Returns:
1238
1239 A list of dictionaries containing the results for each spectrum.
1240 Each dictionary contains two values, the real and the imaginary
1241 parts when getrealimag = True, or the amplitude(absolute value)
1242 and the phase(argument) when getrealimag = False. The key for
1243 these values are 'real' and 'imag', or 'ampl' and 'phase',
1244 respectively.
1245 """
1246 if rowno is None:
1247 rowno = []
1248 if isinstance(rowno, int):
1249 rowno = [rowno]
1250 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1251 raise TypeError("The row number(s) must be int, list or tuple.")
1252 if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1253
1254 usecommonmask = True
1255
1256 if mask is None:
1257 mask = []
1258 if isinstance(mask, list) or isinstance(mask, tuple):
1259 if len(mask) == 0:
1260 mask = [[]]
1261 else:
1262 if isinstance(mask[0], bool):
1263 if len(mask) != self.nchan(self.getif(rowno[0])):
1264 raise ValueError("The spectra and the mask have "
1265 "different length.")
1266 mask = [mask]
1267 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
1268 usecommonmask = False
1269 if len(mask) != len(rowno):
1270 raise ValueError("When specifying masks for each "
1271 "spectrum, the numbers of them "
1272 "must be identical.")
1273 for i in xrange(mask):
1274 if len(mask[i]) != self.nchan(self.getif(rowno[i])):
1275 raise ValueError("The spectra and the mask have "
1276 "different length.")
1277 else:
1278 raise TypeError("The mask must be a boolean list or "
1279 "a list of boolean list.")
1280 else:
1281 raise TypeError("The mask must be a boolean list or a list of "
1282 "boolean list.")
1283
1284 res = []
1285
1286 imask = 0
1287 for whichrow in rowno:
1288 fspec = self._fft(whichrow, mask[imask], getrealimag)
1289 nspec = len(fspec)
1290
1291 i = 0
1292 v1 = []
1293 v2 = []
1294 reselem = {"real":[],"imag":[]} if getrealimag \
1295 else {"ampl":[],"phase":[]}
1296
1297 while (i < nspec):
1298 v1.append(fspec[i])
1299 v2.append(fspec[i+1])
1300 i += 2
1301
1302 if getrealimag:
1303 reselem["real"] += v1
1304 reselem["imag"] += v2
1305 else:
1306 reselem["ampl"] += v1
1307 reselem["phase"] += v2
1308
1309 res.append(reselem)
1310
1311 if not usecommonmask:
1312 imask += 1
1313
1314 return res
1315
1316 @asaplog_post_dec
1317 def create_mask(self, *args, **kwargs):
1318 """\
1319 Compute and return a mask based on [min, max] windows.
1320 The specified windows are to be INCLUDED, when the mask is
1321 applied.
1322
1323 Parameters:
1324
1325 [min, max], [min2, max2], ...
1326 Pairs of start/end points (inclusive)specifying the regions
1327 to be masked
1328
1329 invert: optional argument. If specified as True,
1330 return an inverted mask, i.e. the regions
1331 specified are EXCLUDED
1332
1333 row: create the mask using the specified row for
1334 unit conversions, default is row=0
1335 only necessary if frequency varies over rows.
1336
1337 Examples::
1338
1339 scan.set_unit('channel')
1340 # a)
1341 msk = scan.create_mask([400, 500], [800, 900])
1342 # masks everything outside 400 and 500
1343 # and 800 and 900 in the unit 'channel'
1344
1345 # b)
1346 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1347 # masks the regions between 400 and 500
1348 # and 800 and 900 in the unit 'channel'
1349
1350 # c)
1351 #mask only channel 400
1352 msk = scan.create_mask([400])
1353
1354 """
1355 row = kwargs.get("row", 0)
1356 data = self._getabcissa(row)
1357 u = self._getcoordinfo()[0]
1358 if u == "":
1359 u = "channel"
1360 msg = "The current mask window unit is %s" % u
1361 i = self._check_ifs()
1362 if not i:
1363 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1364 asaplog.push(msg)
1365 n = len(data)
1366 msk = _n_bools(n, False)
1367 # test if args is a 'list' or a 'normal *args - UGLY!!!
1368
1369 ws = (isinstance(args[-1][-1], int)
1370 or isinstance(args[-1][-1], float)) and args or args[0]
1371 for window in ws:
1372 if len(window) == 1:
1373 window = [window[0], window[0]]
1374 if len(window) == 0 or len(window) > 2:
1375 raise ValueError("A window needs to be defined as "
1376 "[start(, end)]")
1377 if window[0] > window[1]:
1378 tmp = window[0]
1379 window[0] = window[1]
1380 window[1] = tmp
1381 for i in range(n):
1382 if data[i] >= window[0] and data[i] <= window[1]:
1383 msk[i] = True
1384 if kwargs.has_key('invert'):
1385 if kwargs.get('invert'):
1386 msk = mask_not(msk)
1387 return msk
1388
1389 def get_masklist(self, mask=None, row=0, silent=False):
1390 """\
1391 Compute and return a list of mask windows, [min, max].
1392
1393 Parameters:
1394
1395 mask: channel mask, created with create_mask.
1396
1397 row: calcutate the masklist using the specified row
1398 for unit conversions, default is row=0
1399 only necessary if frequency varies over rows.
1400
1401 Returns:
1402
1403 [min, max], [min2, max2], ...
1404 Pairs of start/end points (inclusive)specifying
1405 the masked regions
1406
1407 """
1408 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1409 raise TypeError("The mask should be list or tuple.")
1410 if len(mask) <= 0:
1411 raise TypeError("The mask elements should be > 0")
1412 data = self._getabcissa(row)
1413 if len(data) != len(mask):
1414 msg = "Number of channels in scantable != number of mask elements"
1415 raise TypeError(msg)
1416 u = self._getcoordinfo()[0]
1417 if u == "":
1418 u = "channel"
1419 msg = "The current mask window unit is %s" % u
1420 i = self._check_ifs()
1421 if not i:
1422 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1423 if not silent:
1424 asaplog.push(msg)
1425 masklist = []
1426 ist, ien = None, None
1427 ist, ien=self.get_mask_indices(mask)
1428 if ist is not None and ien is not None:
1429 for i in xrange(len(ist)):
1430 range=[data[ist[i]],data[ien[i]]]
1431 range.sort()
1432 masklist.append([range[0],range[1]])
1433 return masklist
1434
1435 def get_mask_indices(self, mask=None):
1436 """\
1437 Compute and Return lists of mask start indices and mask end indices.
1438
1439 Parameters:
1440
1441 mask: channel mask, created with create_mask.
1442
1443 Returns:
1444
1445 List of mask start indices and that of mask end indices,
1446 i.e., [istart1,istart2,....], [iend1,iend2,....].
1447
1448 """
1449 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1450 raise TypeError("The mask should be list or tuple.")
1451 if len(mask) <= 0:
1452 raise TypeError("The mask elements should be > 0")
1453 istart = []
1454 iend = []
1455 if mask[0]:
1456 istart.append(0)
1457 for i in range(len(mask)-1):
1458 if not mask[i] and mask[i+1]:
1459 istart.append(i+1)
1460 elif mask[i] and not mask[i+1]:
1461 iend.append(i)
1462 if mask[len(mask)-1]:
1463 iend.append(len(mask)-1)
1464 if len(istart) != len(iend):
1465 raise RuntimeError("Numbers of mask start != mask end.")
1466 for i in range(len(istart)):
1467 if istart[i] > iend[i]:
1468 raise RuntimeError("Mask start index > mask end index")
1469 break
1470 return istart,iend
1471
    @asaplog_post_dec
    def parse_maskexpr(self, maskstring):
        """
        Parse CASA type mask selection syntax (IF dependent).

        Parameters:
            maskstring : A string mask selection expression.
                         A comma separated selections mean different IF -
                         channel combinations. IFs and channel selections
                         are partitioned by a colon, ':'.
                     examples:
                         ''          = all IFs (all channels)
                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
                                       all channels in IF8
        Returns:
        A dictionary of selected (valid) IF and masklist pairs,
        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
        """
        if not isinstance(maskstring,str):
            asaplog.post()
            asaplog.push("Mask expression should be a string.")
            asaplog.post("ERROR")

        valid_ifs = self.getifnos()
        frequnit = self.get_unit()
        seldict = {}
        # an empty expression selects every valid IF (all channels);
        # str(list)[1:-1] yields e.g. "0, 1, 2" which the parser accepts
        if maskstring == "":
            maskstring = str(valid_ifs)[1:-1]
        ## split each selection "IF range[:CHAN range]"
        sellist = maskstring.split(',')
        for currselstr in sellist:
            selset = currselstr.split(':')
            # spw and mask string (may include ~, < or >)
            spwmasklist = self._parse_selection(selset[0], typestr='integer',
                                                minval=min(valid_ifs),
                                                maxval=max(valid_ifs))
            for spwlist in spwmasklist:
                selspws = []
                for ispw in range(spwlist[0],spwlist[1]+1):
                    # Put into the list only if ispw exists
                    if valid_ifs.count(ispw):
                        selspws.append(ispw)
            del spwmasklist, spwlist

            # parse frequency mask list
            if len(selset) > 1:
                freqmasklist = self._parse_selection(selset[1], typestr='float',
                                                     offset=0.)
            else:
                # want to select the whole spectrum
                freqmasklist = [None]

            ## define a dictionary of spw - masklist combination
            for ispw in selspws:
                #print "working on", ispw
                spwstr = str(ispw)
                if len(selspws) == 0:
                    # empty spw
                    continue
                else:
                    ## want to get min and max of the spw and
                    ## offset to set for '<' and '>'
                    if frequnit == 'channel':
                        minfreq = 0
                        maxfreq = self.nchan(ifno=ispw)
                        offset = 0.5
                    else:
                        ## This is ugly part. need improvement
                        # find the first row belonging to this IF so its
                        # coordinate/abcissa can be used for conversions
                        for ifrow in xrange(self.nrow()):
                            if self.getif(ifrow) == ispw:
                                #print "IF",ispw,"found in row =",ifrow
                                break
                        freqcoord = self.get_coordinate(ifrow)
                        freqs = self._getabcissa(ifrow)
                        minfreq = min(freqs)
                        maxfreq = max(freqs)
                        # offset = half a channel width in the current unit,
                        # used to make '<' and '>' selections exclusive
                        if len(freqs) == 1:
                            offset = 0.5
                        elif frequnit.find('Hz') > 0:
                            offset = abs(freqcoord.to_frequency(1,
                                                                unit=frequnit)
                                         -freqcoord.to_frequency(0,
                                                                 unit=frequnit)
                                         )*0.5
                        elif frequnit.find('m/s') > 0:
                            offset = abs(freqcoord.to_velocity(1,
                                                               unit=frequnit)
                                         -freqcoord.to_velocity(0,
                                                                unit=frequnit)
                                         )*0.5
                        else:
                            asaplog.post()
                            asaplog.push("Invalid frequency unit")
                            asaplog.post("ERROR")
                        del freqs, freqcoord, ifrow
                    for freq in freqmasklist:
                        # None means "whole spectrum" for this IF
                        selmask = freq or [minfreq, maxfreq]
                        if selmask[0] == None:
                            ## selection was "<freq[1]".
                            if selmask[1] < minfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [minfreq,selmask[1]-offset]
                        elif selmask[1] == None:
                            ## selection was ">freq[0]"
                            if selmask[0] > maxfreq:
                                ## avoid adding region selection
                                selmask = None
                            else:
                                selmask = [selmask[0]+offset,maxfreq]
                        if selmask:
                            if not seldict.has_key(spwstr):
                                # new spw selection
                                seldict[spwstr] = []
                            seldict[spwstr] += [selmask]
                    del minfreq,maxfreq,offset,freq,selmask
                del spwstr
            del freqmasklist
        del valid_ifs
        if len(seldict) == 0:
            asaplog.post()
            asaplog.push("No valid selection in the mask expression: "
                         +maskstring)
            asaplog.post("WARN")
            return None
        msg = "Selected masklist:\n"
        for sif, lmask in seldict.iteritems():
            msg += " IF"+sif+" - "+str(lmask)+"\n"
        asaplog.push(msg)
        return seldict
1605
1606 @asaplog_post_dec
1607 def parse_idx_selection(self, mode, selexpr):
1608 """
1609 Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
1610 BEAMNO, and row number
1611
1612 Parameters:
1613 mode : which column to select.
1614 ['scan',|'if'|'pol'|'beam'|'row']
1615 selexpr : A comma separated selection expression.
1616 examples:
1617 '' = all (returns [])
1618 '<2,4~6,9' = indices less than 2, 4 to 6 and 9
1619 (returns [0,1,4,5,6,9])
1620 Returns:
1621 A List of selected indices
1622 """
1623 if selexpr == "":
1624 return []
1625 valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
1626 'b': 'beam', 'r': 'row'}
1627 smode = mode.lower()[0]
1628 if not (smode in valid_modes.keys()):
1629 msg = "Invalid mode '%s'. Valid modes are %s" %\
1630 (mode, str(valid_modes.values()))
1631 asaplog.post()
1632 asaplog.push(msg)
1633 asaplog.post("ERROR")
1634 mode = valid_modes[smode]
1635 minidx = None
1636 maxidx = None
1637 if smode == 'r':
1638 minidx = 0
1639 maxidx = self.nrow()
1640 else:
1641 idx = getattr(self,"get"+mode+"nos")()
1642 minidx = min(idx)
1643 maxidx = max(idx)
1644 del idx
1645 sellist = selexpr.split(',')
1646 idxlist = []
1647 for currselstr in sellist:
1648 # single range (may include ~, < or >)
1649 currlist = self._parse_selection(currselstr, typestr='integer',
1650 minval=minidx,maxval=maxidx)
1651 for thelist in currlist:
1652 idxlist += range(thelist[0],thelist[1]+1)
1653 msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
1654 asaplog.push(msg)
1655 return idxlist
1656
1657 def _parse_selection(self, selstr, typestr='float', offset=0.,
1658 minval=None, maxval=None):
1659 """
1660 Parameters:
1661 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1662 typestr : The type of the values in returned list
1663 ('integer' or 'float')
1664 offset : The offset value to subtract from or add to
1665 the boundary value if the selection string
1666 includes '<' or '>' [Valid only for typestr='float']
1667 minval, maxval : The minimum/maximum values to set if the
1668 selection string includes '<' or '>'.
1669 The list element is filled with None by default.
1670 Returns:
1671 A list of min/max pair of selections.
1672 Example:
1673 _parse_selection('<3;5~7;9',typestr='int',minval=0)
1674 --> returns [[0,2],[5,7],[9,9]]
1675 _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
1676 --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
1677 """
1678 selgroups = selstr.split(';')
1679 sellists = []
1680 if typestr.lower().startswith('int'):
1681 formatfunc = int
1682 offset = 1
1683 else:
1684 formatfunc = float
1685
1686 for currsel in selgroups:
1687 if currsel.find('~') > 0:
1688 # val0 <= x <= val1
1689 minsel = formatfunc(currsel.split('~')[0].strip())
1690 maxsel = formatfunc(currsel.split('~')[1].strip())
1691 elif currsel.strip().find('<=') > -1:
1692 bound = currsel.split('<=')
1693 try: # try "x <= val"
1694 minsel = minval
1695 maxsel = formatfunc(bound[1].strip())
1696 except ValueError: # now "val <= x"
1697 minsel = formatfunc(bound[0].strip())
1698 maxsel = maxval
1699 elif currsel.strip().find('>=') > -1:
1700 bound = currsel.split('>=')
1701 try: # try "x >= val"
1702 minsel = formatfunc(bound[1].strip())
1703 maxsel = maxval
1704 except ValueError: # now "val >= x"
1705 minsel = minval
1706 maxsel = formatfunc(bound[0].strip())
1707 elif currsel.strip().find('<') > -1:
1708 bound = currsel.split('<')
1709 try: # try "x < val"
1710 minsel = minval
1711 maxsel = formatfunc(bound[1].strip()) \
1712 - formatfunc(offset)
1713 except ValueError: # now "val < x"
1714 minsel = formatfunc(bound[0].strip()) \
1715 + formatfunc(offset)
1716 maxsel = maxval
1717 elif currsel.strip().find('>') > -1:
1718 bound = currsel.split('>')
1719 try: # try "x > val"
1720 minsel = formatfunc(bound[1].strip()) \
1721 + formatfunc(offset)
1722 maxsel = maxval
1723 except ValueError: # now "val > x"
1724 minsel = minval
1725 maxsel = formatfunc(bound[0].strip()) \
1726 - formatfunc(offset)
1727 else:
1728 minsel = formatfunc(currsel)
1729 maxsel = formatfunc(currsel)
1730 sellists.append([minsel,maxsel])
1731 return sellists
1732
1733# def get_restfreqs(self):
1734# """
1735# Get the restfrequency(s) stored in this scantable.
1736# The return value(s) are always of unit 'Hz'
1737# Parameters:
1738# none
1739# Returns:
1740# a list of doubles
1741# """
1742# return list(self._getrestfreqs())
1743
1744 def get_restfreqs(self, ids=None):
1745 """\
1746 Get the restfrequency(s) stored in this scantable.
1747 The return value(s) are always of unit 'Hz'
1748
1749 Parameters:
1750
1751 ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1752 be retrieved
1753
1754 Returns:
1755
1756 dictionary containing ids and a list of doubles for each id
1757
1758 """
1759 if ids is None:
1760 rfreqs = {}
1761 idlist = self.getmolnos()
1762 for i in idlist:
1763 rfreqs[i] = list(self._getrestfreqs(i))
1764 return rfreqs
1765 else:
1766 if type(ids) == list or type(ids) == tuple:
1767 rfreqs = {}
1768 for i in ids:
1769 rfreqs[i] = list(self._getrestfreqs(i))
1770 return rfreqs
1771 else:
1772 return list(self._getrestfreqs(ids))
1773
1774 @asaplog_post_dec
1775 def set_restfreqs(self, freqs=None, unit='Hz'):
1776 """\
1777 Set or replace the restfrequency specified and
1778 if the 'freqs' argument holds a scalar,
1779 then that rest frequency will be applied to all the selected
1780 data. If the 'freqs' argument holds
1781 a vector, then it MUST be of equal or smaller length than
1782 the number of IFs (and the available restfrequencies will be
1783 replaced by this vector). In this case, *all* data have
1784 the restfrequency set per IF according
1785 to the corresponding value you give in the 'freqs' vector.
1786 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1787 IF 1 gets restfreq 2e9.
1788
1789 You can also specify the frequencies via a linecatalog.
1790
1791 Parameters:
1792
1793 freqs: list of rest frequency values or string idenitfiers
1794
1795 unit: unit for rest frequency (default 'Hz')
1796
1797
1798 Example::
1799
1800 # set the given restfrequency for the all currently selected IFs
1801 scan.set_restfreqs(freqs=1.4e9)
1802 # set restfrequencies for the n IFs (n > 1) in the order of the
1803 # list, i.e
1804 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1805 # len(list_of_restfreqs) == nIF
1806 # for nIF == 1 the following will set multiple restfrequency for
1807 # that IF
1808 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1809 # set multiple restfrequencies per IF. as a list of lists where
1810 # the outer list has nIF elements, the inner s arbitrary
1811 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1812
1813 *Note*:
1814
1815 To do more sophisticate Restfrequency setting, e.g. on a
1816 source and IF basis, use scantable.set_selection() before using
1817 this function::
1818
1819 # provided your scantable is called scan
1820 selection = selector()
1821 selection.set_name('ORION*')
1822 selection.set_ifs([1])
1823 scan.set_selection(selection)
1824 scan.set_restfreqs(freqs=86.6e9)
1825
1826 """
1827 varlist = vars()
1828 from asap import linecatalog
1829 # simple value
1830 if isinstance(freqs, int) or isinstance(freqs, float):
1831 self._setrestfreqs([freqs], [""], unit)
1832 # list of values
1833 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1834 # list values are scalars
1835 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1836 if len(freqs) == 1:
1837 self._setrestfreqs(freqs, [""], unit)
1838 else:
1839 # allow the 'old' mode of setting mulitple IFs
1840 savesel = self._getselection()
1841 sel = self.get_selection()
1842 iflist = self.getifnos()
1843 if len(freqs)>len(iflist):
1844 raise ValueError("number of elements in list of list "
1845 "exeeds the current IF selections")
1846 iflist = self.getifnos()
1847 for i, fval in enumerate(freqs):
1848 sel.set_ifs(iflist[i])
1849 self._setselection(sel)
1850 self._setrestfreqs([fval], [""], unit)
1851 self._setselection(savesel)
1852
1853 # list values are dict, {'value'=, 'name'=)
1854 elif isinstance(freqs[-1], dict):
1855 values = []
1856 names = []
1857 for d in freqs:
1858 values.append(d["value"])
1859 names.append(d["name"])
1860 self._setrestfreqs(values, names, unit)
1861 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1862 savesel = self._getselection()
1863 sel = self.get_selection()
1864 iflist = self.getifnos()
1865 if len(freqs)>len(iflist):
1866 raise ValueError("number of elements in list of list exeeds"
1867 " the current IF selections")
1868 for i, fval in enumerate(freqs):
1869 sel.set_ifs(iflist[i])
1870 self._setselection(sel)
1871 self._setrestfreqs(fval, [""], unit)
1872 self._setselection(savesel)
1873 # freqs are to be taken from a linecatalog
1874 elif isinstance(freqs, linecatalog):
1875 savesel = self._getselection()
1876 sel = self.get_selection()
1877 for i in xrange(freqs.nrow()):
1878 sel.set_ifs(iflist[i])
1879 self._setselection(sel)
1880 self._setrestfreqs([freqs.get_frequency(i)],
1881 [freqs.get_name(i)], "MHz")
1882 # ensure that we are not iterating past nIF
1883 if i == self.nif()-1: break
1884 self._setselection(savesel)
1885 else:
1886 return
1887 self._add_history("set_restfreqs", varlist)
1888
1889 @asaplog_post_dec
1890 def shift_refpix(self, delta):
1891 """\
1892 Shift the reference pixel of the Spectra Coordinate by an
1893 integer amount.
1894
1895 Parameters:
1896
1897 delta: the amount to shift by
1898
1899 *Note*:
1900
1901 Be careful using this with broadband data.
1902
1903 """
1904 varlist = vars()
1905 Scantable.shift_refpix(self, delta)
1906 s._add_history("shift_refpix", varlist)
1907
1908 @asaplog_post_dec
1909 def history(self, filename=None):
1910 """\
1911 Print the history. Optionally to a file.
1912
1913 Parameters:
1914
1915 filename: The name of the file to save the history to.
1916
1917 """
1918 hist = list(self._gethistory())
1919 out = "-"*80
1920 for h in hist:
1921 if h.startswith("---"):
1922 out = "\n".join([out, h])
1923 else:
1924 items = h.split("##")
1925 date = items[0]
1926 func = items[1]
1927 items = items[2:]
1928 out += "\n"+date+"\n"
1929 out += "Function: %s\n Parameters:" % (func)
1930 for i in items:
1931 if i == '':
1932 continue
1933 s = i.split("=")
1934 out += "\n %s = %s" % (s[0], s[1])
1935 out = "\n".join([out, "-"*80])
1936 if filename is not None:
1937 if filename is "":
1938 filename = 'scantable_history.txt'
1939 import os
1940 filename = os.path.expandvars(os.path.expanduser(filename))
1941 if not os.path.isdir(filename):
1942 data = open(filename, 'w')
1943 data.write(out)
1944 data.close()
1945 else:
1946 msg = "Illegal file name '%s'." % (filename)
1947 raise IOError(msg)
1948 return page(out)
1949
1950 #
1951 # Maths business
1952 #
1953 @asaplog_post_dec
1954 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1955 """\
1956 Return the (time) weighted average of a scan. Scans will be averaged
1957 only if the source direction (RA/DEC) is within 1' otherwise
1958
1959 *Note*:
1960
1961 in channels only - align if necessary
1962
1963 Parameters:
1964
1965 mask: an optional mask (only used for 'var' and 'tsys'
1966 weighting)
1967
1968 scanav: True averages each scan separately
1969 False (default) averages all scans together,
1970
1971 weight: Weighting scheme.
1972 'none' (mean no weight)
1973 'var' (1/var(spec) weighted)
1974 'tsys' (1/Tsys**2 weighted)
1975 'tint' (integration time weighted)
1976 'tintsys' (Tint/Tsys**2)
1977 'median' ( median averaging)
1978 The default is 'tint'
1979
1980 align: align the spectra in velocity before averaging. It takes
1981 the time of the first spectrum as reference time.
1982
1983 Example::
1984
1985 # time average the scantable without using a mask
1986 newscan = scan.average_time()
1987
1988 """
1989 varlist = vars()
1990 weight = weight or 'TINT'
1991 mask = mask or ()
1992 scanav = (scanav and 'SCAN') or 'NONE'
1993 scan = (self, )
1994
1995 if align:
1996 scan = (self.freq_align(insitu=False), )
1997 s = None
1998 if weight.upper() == 'MEDIAN':
1999 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
2000 scanav))
2001 else:
2002 s = scantable(self._math._average(scan, mask, weight.upper(),
2003 scanav))
2004 s._add_history("average_time", varlist)
2005 return s
2006
2007 @asaplog_post_dec
2008 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
2009 """\
2010 Return a scan where all spectra are converted to either
2011 Jansky or Kelvin depending upon the flux units of the scan table.
2012 By default the function tries to look the values up internally.
2013 If it can't find them (or if you want to over-ride), you must
2014 specify EITHER jyperk OR eta (and D which it will try to look up
2015 also if you don't set it). jyperk takes precedence if you set both.
2016
2017 Parameters:
2018
2019 jyperk: the Jy / K conversion factor
2020
2021 eta: the aperture efficiency
2022
2023 d: the geometric diameter (metres)
2024
2025 insitu: if False a new scantable is returned.
2026 Otherwise, the scaling is done in-situ
2027 The default is taken from .asaprc (False)
2028
2029 """
2030 if insitu is None: insitu = rcParams['insitu']
2031 self._math._setinsitu(insitu)
2032 varlist = vars()
2033 jyperk = jyperk or -1.0
2034 d = d or -1.0
2035 eta = eta or -1.0
2036 s = scantable(self._math._convertflux(self, d, eta, jyperk))
2037 s._add_history("convert_flux", varlist)
2038 if insitu: self._assign(s)
2039 else: return s
2040
2041 @asaplog_post_dec
2042 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
2043 """\
2044 Return a scan after applying a gain-elevation correction.
2045 The correction can be made via either a polynomial or a
2046 table-based interpolation (and extrapolation if necessary).
2047 You specify polynomial coefficients, an ascii table or neither.
2048 If you specify neither, then a polynomial correction will be made
2049 with built in coefficients known for certain telescopes (an error
2050 will occur if the instrument is not known).
2051 The data and Tsys are *divided* by the scaling factors.
2052
2053 Parameters:
2054
2055 poly: Polynomial coefficients (default None) to compute a
2056 gain-elevation correction as a function of
2057 elevation (in degrees).
2058
2059 filename: The name of an ascii file holding correction factors.
2060 The first row of the ascii file must give the column
2061 names and these MUST include columns
2062 'ELEVATION' (degrees) and 'FACTOR' (multiply data
2063 by this) somewhere.
2064 The second row must give the data type of the
2065 column. Use 'R' for Real and 'I' for Integer.
2066 An example file would be
2067 (actual factors are arbitrary) :
2068
2069 TIME ELEVATION FACTOR
2070 R R R
2071 0.1 0 0.8
2072 0.2 20 0.85
2073 0.3 40 0.9
2074 0.4 60 0.85
2075 0.5 80 0.8
2076 0.6 90 0.75
2077
2078 method: Interpolation method when correcting from a table.
2079 Values are 'nearest', 'linear' (default), 'cubic'
2080 and 'spline'
2081
2082 insitu: if False a new scantable is returned.
2083 Otherwise, the scaling is done in-situ
2084 The default is taken from .asaprc (False)
2085
2086 """
2087
2088 if insitu is None: insitu = rcParams['insitu']
2089 self._math._setinsitu(insitu)
2090 varlist = vars()
2091 poly = poly or ()
2092 from os.path import expandvars
2093 filename = expandvars(filename)
2094 s = scantable(self._math._gainel(self, poly, filename, method))
2095 s._add_history("gain_el", varlist)
2096 if insitu:
2097 self._assign(s)
2098 else:
2099 return s
2100
2101 @asaplog_post_dec
2102 def freq_align(self, reftime=None, method='cubic', insitu=None):
2103 """\
2104 Return a scan where all rows have been aligned in frequency/velocity.
2105 The alignment frequency frame (e.g. LSRK) is that set by function
2106 set_freqframe.
2107
2108 Parameters:
2109
2110 reftime: reference time to align at. By default, the time of
2111 the first row of data is used.
2112
2113 method: Interpolation method for regridding the spectra.
2114 Choose from 'nearest', 'linear', 'cubic' (default)
2115 and 'spline'
2116
2117 insitu: if False a new scantable is returned.
2118 Otherwise, the scaling is done in-situ
2119 The default is taken from .asaprc (False)
2120
2121 """
2122 if insitu is None: insitu = rcParams["insitu"]
2123 oldInsitu = self._math._insitu()
2124 self._math._setinsitu(insitu)
2125 varlist = vars()
2126 reftime = reftime or ""
2127 s = scantable(self._math._freq_align(self, reftime, method))
2128 s._add_history("freq_align", varlist)
2129 self._math._setinsitu(oldInsitu)
2130 if insitu:
2131 self._assign(s)
2132 else:
2133 return s
2134
2135 @asaplog_post_dec
2136 def opacity(self, tau=None, insitu=None):
2137 """\
2138 Apply an opacity correction. The data
2139 and Tsys are multiplied by the correction factor.
2140
2141 Parameters:
2142
2143 tau: (list of) opacity from which the correction factor is
2144 exp(tau*ZD)
2145 where ZD is the zenith-distance.
2146 If a list is provided, it has to be of length nIF,
2147 nIF*nPol or 1 and in order of IF/POL, e.g.
2148 [opif0pol0, opif0pol1, opif1pol0 ...]
2149 if tau is `None` the opacities are determined from a
2150 model.
2151
2152 insitu: if False a new scantable is returned.
2153 Otherwise, the scaling is done in-situ
2154 The default is taken from .asaprc (False)
2155
2156 """
2157 if insitu is None:
2158 insitu = rcParams['insitu']
2159 self._math._setinsitu(insitu)
2160 varlist = vars()
2161 if not hasattr(tau, "__len__"):
2162 tau = [tau]
2163 s = scantable(self._math._opacity(self, tau))
2164 s._add_history("opacity", varlist)
2165 if insitu:
2166 self._assign(s)
2167 else:
2168 return s
2169
2170 @asaplog_post_dec
2171 def bin(self, width=5, insitu=None):
2172 """\
2173 Return a scan where all spectra have been binned up.
2174
2175 Parameters:
2176
2177 width: The bin width (default=5) in pixels
2178
2179 insitu: if False a new scantable is returned.
2180 Otherwise, the scaling is done in-situ
2181 The default is taken from .asaprc (False)
2182
2183 """
2184 if insitu is None:
2185 insitu = rcParams['insitu']
2186 self._math._setinsitu(insitu)
2187 varlist = vars()
2188 s = scantable(self._math._bin(self, width))
2189 s._add_history("bin", varlist)
2190 if insitu:
2191 self._assign(s)
2192 else:
2193 return s
2194
    @asaplog_post_dec
    def reshape(self, first, last, insitu=None):
        """Resize the band by providing first and last channel.
        This will cut off all channels outside [first, last].

        Parameters:

            first:      index of the first channel to keep

            last:       index of the last channel to keep. A negative
                        value is counted back from the end of the band.
                        NOTE(review): last=-1 maps to nchan()-2, not
                        nchan()-1 as Python indexing would suggest --
                        confirm this convention is intended.

            insitu:     if False a new scantable is returned.
                        Otherwise, the modification is done in-situ
                        The default is taken from .asaprc (False)
        """
        if insitu is None:
            insitu = rcParams['insitu']
        varlist = vars()
        # translate a negative 'last' into an absolute channel index
        if last < 0:
            last = self.nchan()-1 + last
        s = None
        if insitu:
            s = self
        else:
            s = self.copy()
        s._reshape(first,last)
        s._add_history("reshape", varlist)
        if not insitu:
            return s
2214
2215 @asaplog_post_dec
2216 def resample(self, width=5, method='cubic', insitu=None):
2217 """\
2218 Return a scan where all spectra have been binned up.
2219
2220 Parameters:
2221
2222 width: The bin width (default=5) in pixels
2223
2224 method: Interpolation method when correcting from a table.
2225 Values are 'nearest', 'linear', 'cubic' (default)
2226 and 'spline'
2227
2228 insitu: if False a new scantable is returned.
2229 Otherwise, the scaling is done in-situ
2230 The default is taken from .asaprc (False)
2231
2232 """
2233 if insitu is None:
2234 insitu = rcParams['insitu']
2235 self._math._setinsitu(insitu)
2236 varlist = vars()
2237 s = scantable(self._math._resample(self, method, width))
2238 s._add_history("resample", varlist)
2239 if insitu:
2240 self._assign(s)
2241 else:
2242 return s
2243
2244 @asaplog_post_dec
2245 def average_pol(self, mask=None, weight='none'):
2246 """\
2247 Average the Polarisations together.
2248
2249 Parameters:
2250
2251 mask: An optional mask defining the region, where the
2252 averaging will be applied. The output will have all
2253 specified points masked.
2254
2255 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2256 weighted), or 'tsys' (1/Tsys**2 weighted)
2257
2258 """
2259 varlist = vars()
2260 mask = mask or ()
2261 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2262 s._add_history("average_pol", varlist)
2263 return s
2264
2265 @asaplog_post_dec
2266 def average_beam(self, mask=None, weight='none'):
2267 """\
2268 Average the Beams together.
2269
2270 Parameters:
2271 mask: An optional mask defining the region, where the
2272 averaging will be applied. The output will have all
2273 specified points masked.
2274
2275 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2276 weighted), or 'tsys' (1/Tsys**2 weighted)
2277
2278 """
2279 varlist = vars()
2280 mask = mask or ()
2281 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2282 s._add_history("average_beam", varlist)
2283 return s
2284
2285 def parallactify(self, pflag):
2286 """\
2287 Set a flag to indicate whether this data should be treated as having
2288 been 'parallactified' (total phase == 0.0)
2289
2290 Parameters:
2291
2292 pflag: Bool indicating whether to turn this on (True) or
2293 off (False)
2294
2295 """
2296 varlist = vars()
2297 self._parallactify(pflag)
2298 self._add_history("parallactify", varlist)
2299
2300 @asaplog_post_dec
2301 def convert_pol(self, poltype=None):
2302 """\
2303 Convert the data to a different polarisation type.
2304 Note that you will need cross-polarisation terms for most conversions.
2305
2306 Parameters:
2307
2308 poltype: The new polarisation type. Valid types are:
2309 'linear', 'circular', 'stokes' and 'linpol'
2310
2311 """
2312 varlist = vars()
2313 s = scantable(self._math._convertpol(self, poltype))
2314 s._add_history("convert_pol", varlist)
2315 return s
2316
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
               insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        # interactive verification state handed to _get_verify_action:
        # starts as 'H', cleared to None after the first row, and set to
        # "N" once the user rejects all remaining rows (exact prompt
        # semantics live in _get_verify_action -- confirm there)
        action = 'H'
        if plot:
            from asap.asapplotter import new_asaplot
            theplot = new_asaplot(rcParams['plotter.gui'])
            from matplotlib import rc as rcp
            rcp('lines', linewidth=1)
            theplot.set_panels()
            ylab=s._get_ordinate_label()
            #theplot.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                if action != "N": #skip plotting if rejecting all
                    theplot.clear()
                    theplot.hold()
                    theplot.set_axes('ylabel',ylab)
                    theplot.set_axes('xlabel',s._getabcissalabel(r))
                    theplot.set_axes('title',s._getsourcename(r))
                    theplot.set_line(label='Original',color="#777777")
                    theplot.plot(xorg,yorg)
                    theplot.set_line(label='Smoothed',color="red")
                    theplot.plot(xsm,ysm)
                    ### Ugly part for legend
                    for i in [0,1]:
                        theplot.subplots[0]['lines'].append(
                            [theplot.subplots[0]['axes'].lines[i]]
                            )
                    theplot.release()
                    ### Ugly part for legend
                    theplot.subplots[0]['lines']=[]
                res = self._get_verify_action("Accept smoothing?",action)
                #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
                # after the first row the one-shot default action is cleared
                if r == 0: action = None
                #res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # reject for the current rows
                    s._setspectrum(yorg, r)
                elif res.upper() == 'R':
                    # reject all the following rows
                    action = "N"
                    s._setspectrum(yorg, r)
                elif res.upper() == 'A':
                    # accept all the following rows
                    break
            theplot.quit()
            del theplot
            del orgscan

        if insitu: self._assign(s)
        else: return s
2409
2410 @asaplog_post_dec
2411 def regrid_channel(self, width=5, plot=False, insitu=None):
2412 """\
2413 Regrid the spectra by the specified channel width
2414
2415 Parameters:
2416
2417 width: The channel width (float) of regridded spectra
2418 in the current spectral unit.
2419
2420 plot: [NOT IMPLEMENTED YET]
2421 plot the original and the regridded spectra.
2422 In this each indivual fit has to be approved, by
2423 typing 'y' or 'n'
2424
2425 insitu: if False a new scantable is returned.
2426 Otherwise, the scaling is done in-situ
2427 The default is taken from .asaprc (False)
2428
2429 """
2430 if insitu is None: insitu = rcParams['insitu']
2431 varlist = vars()
2432
2433 if plot:
2434 asaplog.post()
2435 asaplog.push("Verification plot is not implemtnetd yet.")
2436 asaplog.post("WARN")
2437
2438 s = self.copy()
2439 s._regrid_specchan(width)
2440
2441 s._add_history("regrid_channel", varlist)
2442
2443# if plot:
2444# from asap.asapplotter import new_asaplot
2445# theplot = new_asaplot(rcParams['plotter.gui'])
2446# from matplotlib import rc as rcp
2447# rcp('lines', linewidth=1)
2448# theplot.set_panels()
2449# ylab=s._get_ordinate_label()
2450# #theplot.palette(0,["#777777","red"])
2451# for r in xrange(s.nrow()):
2452# xsm=s._getabcissa(r)
2453# ysm=s._getspectrum(r)
2454# xorg=orgscan._getabcissa(r)
2455# yorg=orgscan._getspectrum(r)
2456# theplot.clear()
2457# theplot.hold()
2458# theplot.set_axes('ylabel',ylab)
2459# theplot.set_axes('xlabel',s._getabcissalabel(r))
2460# theplot.set_axes('title',s._getsourcename(r))
2461# theplot.set_line(label='Original',color="#777777")
2462# theplot.plot(xorg,yorg)
2463# theplot.set_line(label='Smoothed',color="red")
2464# theplot.plot(xsm,ysm)
2465# ### Ugly part for legend
2466# for i in [0,1]:
2467# theplot.subplots[0]['lines'].append(
2468# [theplot.subplots[0]['axes'].lines[i]]
2469# )
2470# theplot.release()
2471# ### Ugly part for legend
2472# theplot.subplots[0]['lines']=[]
2473# res = raw_input("Accept smoothing ([y]/n): ")
2474# if res.upper() == 'N':
2475# s._setspectrum(yorg, r)
2476# theplot.quit()
2477# del theplot
2478# del orgscan
2479
2480 if insitu: self._assign(s)
2481 else: return s
2482
2483 @asaplog_post_dec
2484 def _parse_wn(self, wn):
2485 if isinstance(wn, list) or isinstance(wn, tuple):
2486 return wn
2487 elif isinstance(wn, int):
2488 return [ wn ]
2489 elif isinstance(wn, str):
2490 if '-' in wn: # case 'a-b' : return [a,a+1,...,b-1,b]
2491 val = wn.split('-')
2492 val = [int(val[0]), int(val[1])]
2493 val.sort()
2494 res = [i for i in xrange(val[0], val[1]+1)]
2495 elif wn[:2] == '<=' or wn[:2] == '=<': # cases '<=a','=<a' : return [0,1,...,a-1,a]
2496 val = int(wn[2:])+1
2497 res = [i for i in xrange(val)]
2498 elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2499 val = int(wn[:-2])+1
2500 res = [i for i in xrange(val)]
2501 elif wn[0] == '<': # case '<a' : return [0,1,...,a-2,a-1]
2502 val = int(wn[1:])
2503 res = [i for i in xrange(val)]
2504 elif wn[-1] == '>': # case 'a>' : return [0,1,...,a-2,a-1]
2505 val = int(wn[:-1])
2506 res = [i for i in xrange(val)]
2507 elif wn[:2] == '>=' or wn[:2] == '=>': # cases '>=a','=>a' : return [a,-999], which is
2508 # then interpreted in C++
2509 # side as [a,a+1,...,a_nyq]
2510 # (CAS-3759)
2511 val = int(wn[2:])
2512 res = [val, -999]
2513 #res = [i for i in xrange(val, self.nchan()/2+1)]
2514 elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
2515 # then interpreted in C++
2516 # side as [a,a+1,...,a_nyq]
2517 # (CAS-3759)
2518 val = int(wn[:-2])
2519 res = [val, -999]
2520 #res = [i for i in xrange(val, self.nchan()/2+1)]
2521 elif wn[0] == '>': # case '>a' : return [a+1,-999], which is
2522 # then interpreted in C++
2523 # side as [a+1,a+2,...,a_nyq]
2524 # (CAS-3759)
2525 val = int(wn[1:])+1
2526 res = [val, -999]
2527 #res = [i for i in xrange(val, self.nchan()/2+1)]
2528 elif wn[-1] == '<': # case 'a<' : return [a+1,-999], which is
2529 # then interpreted in C++
2530 # side as [a+1,a+2,...,a_nyq]
2531 # (CAS-3759)
2532 val = int(wn[:-1])+1
2533 res = [val, -999]
2534 #res = [i for i in xrange(val, self.nchan()/2+1)]
2535
2536 return res
2537 else:
2538 msg = 'wrong value given for addwn/rejwn'
2539 raise RuntimeError(msg)
2540
2541
2542 @asaplog_post_dec
2543 def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2544 fftmethod=None, fftthresh=None,
2545 addwn=None, rejwn=None, clipthresh=None,
2546 clipniter=None, plot=None,
2547 getresidual=None, showprogress=None,
2548 minnrow=None, outlog=None, blfile=None, csvformat=None):
2549 """\
2550 Return a scan which has been baselined (all rows) with sinusoidal
2551 functions.
2552
2553 Parameters:
2554 insitu: if False a new scantable is returned.
2555 Otherwise, the scaling is done in-situ
2556 The default is taken from .asaprc (False)
2557 mask: an optional mask
2558 applyfft: if True use some method, such as FFT, to find
2559 strongest sinusoidal components in the wavenumber
2560 domain to be used for baseline fitting.
2561 default is True.
2562 fftmethod: method to find the strong sinusoidal components.
2563 now only 'fft' is available and it is the default.
2564 fftthresh: the threshold to select wave numbers to be used for
2565 fitting from the distribution of amplitudes in the
2566 wavenumber domain.
2567 both float and string values accepted.
2568 given a float value, the unit is set to sigma.
2569 for string values, allowed formats include:
2570 'xsigma' or 'x' (= x-sigma level. e.g.,
2571 '3sigma'), or
2572 'topx' (= the x strongest ones, e.g. 'top5').
2573 default is 3.0 (unit: sigma).
2574 addwn: the additional wave numbers to be used for fitting.
2575 list or integer value is accepted to specify every
2576 wave numbers. also string value can be used in case
2577 you need to specify wave numbers in a certain range,
2578 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2579 '<a' (= 0,1,...,a-2,a-1),
2580 '>=a' (= a, a+1, ... up to the maximum wave
2581 number corresponding to the Nyquist
2582 frequency for the case of FFT).
2583 default is [0].
2584 rejwn: the wave numbers NOT to be used for fitting.
2585 can be set just as addwn but has higher priority:
2586 wave numbers which are specified both in addwn
2587 and rejwn will NOT be used. default is [].
2588 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2589 clipniter: maximum number of iteration of 'clipthresh'-sigma
2590 clipping (default is 0)
2591 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2592 plot the fit and the residual. In this each
2593 indivual fit has to be approved, by typing 'y'
2594 or 'n'
2595 getresidual: if False, returns best-fit values instead of
2596 residual. (default is True)
2597 showprogress: show progress status for large data.
2598 default is True.
2599 minnrow: minimum number of input spectra to show.
2600 default is 1000.
2601 outlog: Output the coefficients of the best-fit
2602 function to logger (default is False)
2603 blfile: Name of a text file in which the best-fit
2604 parameter values to be written
2605 (default is '': no file/logger output)
2606 csvformat: if True blfile is csv-formatted, default is False.
2607
2608 Example:
2609 # return a scan baselined by a combination of sinusoidal curves
2610 # having wave numbers in spectral window up to 10,
2611 # also with 3-sigma clipping, iteration up to 4 times
2612 bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2613
2614 Note:
2615 The best-fit parameter values output in logger and/or blfile are now
2616 based on specunit of 'channel'.
2617 """
2618
2619 try:
2620 varlist = vars()
2621
2622 if insitu is None: insitu = rcParams['insitu']
2623 if insitu:
2624 workscan = self
2625 else:
2626 workscan = self.copy()
2627
2628 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2629 if mask is None: mask = []
2630 if applyfft is None: applyfft = True
2631 if fftmethod is None: fftmethod = 'fft'
2632 if fftthresh is None: fftthresh = 3.0
2633 if addwn is None: addwn = [0]
2634 if rejwn is None: rejwn = []
2635 if clipthresh is None: clipthresh = 3.0
2636 if clipniter is None: clipniter = 0
2637 if plot is None: plot = False
2638 if getresidual is None: getresidual = True
2639 if showprogress is None: showprogress = True
2640 if minnrow is None: minnrow = 1000
2641 if outlog is None: outlog = False
2642 if blfile is None: blfile = ''
2643 if csvformat is None: csvformat = False
2644
2645 if csvformat:
2646 scsvformat = "T"
2647 else:
2648 scsvformat = "F"
2649
2650 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2651 workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
2652 str(fftthresh).lower(),
2653 workscan._parse_wn(addwn),
2654 workscan._parse_wn(rejwn),
2655 clipthresh, clipniter,
2656 getresidual,
2657 pack_progress_params(showprogress,
2658 minnrow),
2659 outlog, scsvformat+blfile)
2660 workscan._add_history('sinusoid_baseline', varlist)
2661
2662 if insitu:
2663 self._assign(workscan)
2664 else:
2665 return workscan
2666
2667 except RuntimeError, e:
2668 raise_fitting_failure_exception(e)
2669
2670
2671 @asaplog_post_dec
2672 def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
2673 fftmethod=None, fftthresh=None,
2674 addwn=None, rejwn=None, clipthresh=None,
2675 clipniter=None, edge=None, threshold=None,
2676 chan_avg_limit=None, plot=None,
2677 getresidual=None, showprogress=None,
2678 minnrow=None, outlog=None, blfile=None, csvformat=None):
2679 """\
2680 Return a scan which has been baselined (all rows) with sinusoidal
2681 functions.
2682 Spectral lines are detected first using linefinder and masked out
2683 to avoid them affecting the baseline solution.
2684
2685 Parameters:
2686 insitu: if False a new scantable is returned.
2687 Otherwise, the scaling is done in-situ
2688 The default is taken from .asaprc (False)
2689 mask: an optional mask retreived from scantable
2690 applyfft: if True use some method, such as FFT, to find
2691 strongest sinusoidal components in the wavenumber
2692 domain to be used for baseline fitting.
2693 default is True.
2694 fftmethod: method to find the strong sinusoidal components.
2695 now only 'fft' is available and it is the default.
2696 fftthresh: the threshold to select wave numbers to be used for
2697 fitting from the distribution of amplitudes in the
2698 wavenumber domain.
2699 both float and string values accepted.
2700 given a float value, the unit is set to sigma.
2701 for string values, allowed formats include:
2702 'xsigma' or 'x' (= x-sigma level. e.g.,
2703 '3sigma'), or
2704 'topx' (= the x strongest ones, e.g. 'top5').
2705 default is 3.0 (unit: sigma).
2706 addwn: the additional wave numbers to be used for fitting.
2707 list or integer value is accepted to specify every
2708 wave numbers. also string value can be used in case
2709 you need to specify wave numbers in a certain range,
2710 e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2711 '<a' (= 0,1,...,a-2,a-1),
2712 '>=a' (= a, a+1, ... up to the maximum wave
2713 number corresponding to the Nyquist
2714 frequency for the case of FFT).
2715 default is [0].
2716 rejwn: the wave numbers NOT to be used for fitting.
2717 can be set just as addwn but has higher priority:
2718 wave numbers which are specified both in addwn
2719 and rejwn will NOT be used. default is [].
2720 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2721 clipniter: maximum number of iteration of 'clipthresh'-sigma
2722 clipping (default is 0)
2723 edge: an optional number of channel to drop at
2724 the edge of spectrum. If only one value is
2725 specified, the same number will be dropped
2726 from both sides of the spectrum. Default
2727 is to keep all channels. Nested tuples
2728 represent individual edge selection for
2729 different IFs (a number of spectral channels
2730 can be different)
2731 threshold: the threshold used by line finder. It is
2732 better to keep it large as only strong lines
2733 affect the baseline solution.
2734 chan_avg_limit: a maximum number of consequtive spectral
2735 channels to average during the search of
2736 weak and broad lines. The default is no
2737 averaging (and no search for weak lines).
2738 If such lines can affect the fitted baseline
2739 (e.g. a high order polynomial is fitted),
2740 increase this parameter (usually values up
2741 to 8 are reasonable). Most users of this
2742 method should find the default value sufficient.
2743 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2744 plot the fit and the residual. In this each
2745 indivual fit has to be approved, by typing 'y'
2746 or 'n'
2747 getresidual: if False, returns best-fit values instead of
2748 residual. (default is True)
2749 showprogress: show progress status for large data.
2750 default is True.
2751 minnrow: minimum number of input spectra to show.
2752 default is 1000.
2753 outlog: Output the coefficients of the best-fit
2754 function to logger (default is False)
2755 blfile: Name of a text file in which the best-fit
2756 parameter values to be written
2757 (default is "": no file/logger output)
2758 csvformat: if True blfile is csv-formatted, default is False.
2759
2760 Example:
2761 bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2762
2763 Note:
2764 The best-fit parameter values output in logger and/or blfile are now
2765 based on specunit of 'channel'.
2766 """
2767
2768 try:
2769 varlist = vars()
2770
2771 if insitu is None: insitu = rcParams['insitu']
2772 if insitu:
2773 workscan = self
2774 else:
2775 workscan = self.copy()
2776
2777 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2778 if mask is None: mask = []
2779 if applyfft is None: applyfft = True
2780 if fftmethod is None: fftmethod = 'fft'
2781 if fftthresh is None: fftthresh = 3.0
2782 if addwn is None: addwn = [0]
2783 if rejwn is None: rejwn = []
2784 if clipthresh is None: clipthresh = 3.0
2785 if clipniter is None: clipniter = 0
2786 if edge is None: edge = (0,0)
2787 if threshold is None: threshold = 3
2788 if chan_avg_limit is None: chan_avg_limit = 1
2789 if plot is None: plot = False
2790 if getresidual is None: getresidual = True
2791 if showprogress is None: showprogress = True
2792 if minnrow is None: minnrow = 1000
2793 if outlog is None: outlog = False
2794 if blfile is None: blfile = ''
2795 if csvformat is None: csvformat = False
2796
2797 if csvformat:
2798 scsvformat = "T"
2799 else:
2800 scsvformat = "F"
2801
2802 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2803 workscan._auto_sinusoid_baseline(mask, applyfft,
2804 fftmethod.lower(),
2805 str(fftthresh).lower(),
2806 workscan._parse_wn(addwn),
2807 workscan._parse_wn(rejwn),
2808 clipthresh, clipniter,
2809 normalise_edge_param(edge),
2810 threshold, chan_avg_limit,
2811 getresidual,
2812 pack_progress_params(showprogress,
2813 minnrow),
2814 outlog, scsvformat+blfile)
2815 workscan._add_history("auto_sinusoid_baseline", varlist)
2816
2817 if insitu:
2818 self._assign(workscan)
2819 else:
2820 return workscan
2821
2822 except RuntimeError, e:
2823 raise_fitting_failure_exception(e)
2824
2825 @asaplog_post_dec
2826 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2827 clipthresh=None, clipniter=None, plot=None,
2828 getresidual=None, showprogress=None, minnrow=None,
2829 outlog=None, blfile=None, csvformat=None):
2830 """\
2831 Return a scan which has been baselined (all rows) by cubic spline
2832 function (piecewise cubic polynomial).
2833
2834 Parameters:
2835 insitu: If False a new scantable is returned.
2836 Otherwise, the scaling is done in-situ
2837 The default is taken from .asaprc (False)
2838 mask: An optional mask
2839 npiece: Number of pieces. (default is 2)
2840 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2841 clipniter: maximum number of iteration of 'clipthresh'-sigma
2842 clipping (default is 0)
2843 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2844 plot the fit and the residual. In this each
2845 indivual fit has to be approved, by typing 'y'
2846 or 'n'
2847 getresidual: if False, returns best-fit values instead of
2848 residual. (default is True)
2849 showprogress: show progress status for large data.
2850 default is True.
2851 minnrow: minimum number of input spectra to show.
2852 default is 1000.
2853 outlog: Output the coefficients of the best-fit
2854 function to logger (default is False)
2855 blfile: Name of a text file in which the best-fit
2856 parameter values to be written
2857 (default is "": no file/logger output)
2858 csvformat: if True blfile is csv-formatted, default is False.
2859
2860 Example:
2861 # return a scan baselined by a cubic spline consisting of 2 pieces
2862 # (i.e., 1 internal knot),
2863 # also with 3-sigma clipping, iteration up to 4 times
2864 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2865
2866 Note:
2867 The best-fit parameter values output in logger and/or blfile are now
2868 based on specunit of 'channel'.
2869 """
2870
2871 try:
2872 varlist = vars()
2873
2874 if insitu is None: insitu = rcParams['insitu']
2875 if insitu:
2876 workscan = self
2877 else:
2878 workscan = self.copy()
2879
2880 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2881 if mask is None: mask = []
2882 if npiece is None: npiece = 2
2883 if clipthresh is None: clipthresh = 3.0
2884 if clipniter is None: clipniter = 0
2885 if plot is None: plot = False
2886 if getresidual is None: getresidual = True
2887 if showprogress is None: showprogress = True
2888 if minnrow is None: minnrow = 1000
2889 if outlog is None: outlog = False
2890 if blfile is None: blfile = ''
2891 if csvformat is None: csvformat = False
2892
2893 if csvformat:
2894 scsvformat = "T"
2895 else:
2896 scsvformat = "F"
2897
2898 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2899 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
2900 getresidual,
2901 pack_progress_params(showprogress,
2902 minnrow),
2903 outlog, scsvformat+blfile)
2904 workscan._add_history("cspline_baseline", varlist)
2905
2906 if insitu:
2907 self._assign(workscan)
2908 else:
2909 return workscan
2910
2911 except RuntimeError, e:
2912 raise_fitting_failure_exception(e)
2913
2914 @asaplog_post_dec
2915 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None,
2916 clipthresh=None, clipniter=None,
2917 edge=None, threshold=None, chan_avg_limit=None,
2918 getresidual=None, plot=None,
2919 showprogress=None, minnrow=None, outlog=None,
2920 blfile=None, csvformat=None):
2921 """\
2922 Return a scan which has been baselined (all rows) by cubic spline
2923 function (piecewise cubic polynomial).
2924 Spectral lines are detected first using linefinder and masked out
2925 to avoid them affecting the baseline solution.
2926
2927 Parameters:
2928 insitu: if False a new scantable is returned.
2929 Otherwise, the scaling is done in-situ
2930 The default is taken from .asaprc (False)
2931 mask: an optional mask retreived from scantable
2932 npiece: Number of pieces. (default is 2)
2933 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2934 clipniter: maximum number of iteration of 'clipthresh'-sigma
2935 clipping (default is 0)
2936 edge: an optional number of channel to drop at
2937 the edge of spectrum. If only one value is
2938 specified, the same number will be dropped
2939 from both sides of the spectrum. Default
2940 is to keep all channels. Nested tuples
2941 represent individual edge selection for
2942 different IFs (a number of spectral channels
2943 can be different)
2944 threshold: the threshold used by line finder. It is
2945 better to keep it large as only strong lines
2946 affect the baseline solution.
2947 chan_avg_limit: a maximum number of consequtive spectral
2948 channels to average during the search of
2949 weak and broad lines. The default is no
2950 averaging (and no search for weak lines).
2951 If such lines can affect the fitted baseline
2952 (e.g. a high order polynomial is fitted),
2953 increase this parameter (usually values up
2954 to 8 are reasonable). Most users of this
2955 method should find the default value sufficient.
2956 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2957 plot the fit and the residual. In this each
2958 indivual fit has to be approved, by typing 'y'
2959 or 'n'
2960 getresidual: if False, returns best-fit values instead of
2961 residual. (default is True)
2962 showprogress: show progress status for large data.
2963 default is True.
2964 minnrow: minimum number of input spectra to show.
2965 default is 1000.
2966 outlog: Output the coefficients of the best-fit
2967 function to logger (default is False)
2968 blfile: Name of a text file in which the best-fit
2969 parameter values to be written
2970 (default is "": no file/logger output)
2971 csvformat: if True blfile is csv-formatted, default is False.
2972
2973 Example:
2974 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2975
2976 Note:
2977 The best-fit parameter values output in logger and/or blfile are now
2978 based on specunit of 'channel'.
2979 """
2980
2981 try:
2982 varlist = vars()
2983
2984 if insitu is None: insitu = rcParams['insitu']
2985 if insitu:
2986 workscan = self
2987 else:
2988 workscan = self.copy()
2989
2990 #if mask is None: mask = [True for i in xrange(workscan.nchan())]
2991 if mask is None: mask = []
2992 if npiece is None: npiece = 2
2993 if clipthresh is None: clipthresh = 3.0
2994 if clipniter is None: clipniter = 0
2995 if edge is None: edge = (0, 0)
2996 if threshold is None: threshold = 3
2997 if chan_avg_limit is None: chan_avg_limit = 1
2998 if plot is None: plot = False
2999 if getresidual is None: getresidual = True
3000 if showprogress is None: showprogress = True
3001 if minnrow is None: minnrow = 1000
3002 if outlog is None: outlog = False
3003 if blfile is None: blfile = ''
3004 if csvformat is None: csvformat = False
3005
3006 if csvformat:
3007 scsvformat = "T"
3008 else:
3009 scsvformat = "F"
3010
3011 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
3012 workscan._auto_cspline_baseline(mask, npiece, clipthresh,
3013 clipniter,
3014 normalise_edge_param(edge),
3015 threshold,
3016 chan_avg_limit, getresidual,
3017 pack_progress_params(showprogress,
3018 minnrow),
3019 outlog, scsvformat+blfile)
3020 workscan._add_history("auto_cspline_baseline", varlist)
3021
3022 if insitu:
3023 self._assign(workscan)
3024 else:
3025 return workscan
3026
3027 except RuntimeError, e:
3028 raise_fitting_failure_exception(e)
3029
3030 @asaplog_post_dec
    def chebyshev_baseline(self, insitu=None, mask=None, order=None,
                           clipthresh=None, clipniter=None, plot=None,
                           getresidual=None, showprogress=None, minnrow=None,
                           outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.

        Parameters:
            insitu: If False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            mask: An optional mask
            order: the maximum order of Chebyshev polynomial (default is 5)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter: maximum number of iteration of 'clipthresh'-sigma
                       clipping (default is 0)
            plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                  plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by Chebyshev polynomials of maximum
            # order 5, with 3-sigma clipping, iteration up to 4 times
            bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the csv flag travels to the C++ layer as a 'T'/'F' character
            # prepended to the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is
            #implemented as a fitter method.
            workscan._chebyshev_baseline(mask, order, clipthresh, clipniter,
                                         getresidual,
                                         pack_progress_params(showprogress,
                                                              minnrow),
                                         outlog, scsvformat+blfile)
            workscan._add_history("chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3117
3118 @asaplog_post_dec
    def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None,
                                clipthresh=None, clipniter=None,
                                edge=None, threshold=None, chan_avg_limit=None,
                                getresidual=None, plot=None,
                                showprogress=None, minnrow=None, outlog=None,
                                blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by Chebyshev polynomials.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            mask: an optional mask retreived from scantable
            order: the maximum order of Chebyshev polynomial (default is 5)
            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
            clipniter: maximum number of iteration of 'clipthresh'-sigma
                       clipping (default is 0)
            edge: an optional number of channel to drop at
                  the edge of spectrum. If only one value is
                  specified, the same number will be dropped
                  from both sides of the spectrum. Default
                  is to keep all channels. Nested tuples
                  represent individual edge selection for
                  different IFs (a number of spectral channels
                  can be different)
            threshold: the threshold used by line finder. It is
                       better to keep it large as only strong lines
                       affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
                  plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)

        Note:
            The best-fit parameter values output in logger and/or blfile are now
            based on specunit of 'channel'.
        """

        try:
            varlist = vars()

            if insitu is None: insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 5
            if clipthresh is None: clipthresh = 3.0
            if clipniter is None: clipniter = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the csv flag travels to the C++ layer as a 'T'/'F' character
            # prepended to the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is
            #implemented as a fitter method.
            workscan._auto_chebyshev_baseline(mask, order, clipthresh,
                                              clipniter,
                                              normalise_edge_param(edge),
                                              threshold,
                                              chan_avg_limit, getresidual,
                                              pack_progress_params(showprogress,
                                                                   minnrow),
                                              outlog, scsvformat+blfile)
            workscan._add_history("auto_chebyshev_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3232
3233 @asaplog_post_dec
    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
                      getresidual=None, showprogress=None, minnrow=None,
                      outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:
            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            mask: an optional mask
            order: the order of the polynomial (default is 0)
            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams["insitu"]
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in \
            #                           xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ""
            if csvformat is None: csvformat = False

            # the csv flag travels to the C++ layer as a 'T'/'F' character
            # prepended to the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            if plot:
                # interactive mode: fit row by row in Python so each fit
                # can be inspected and accepted/rejected
                # NOTE(review): the baseline file is appended to only if it
                # already exists -- presumably created by the caller; verify
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(
                                        os.path.expandvars(blfile))
                                   )
                if outblfile:
                    blf = open(blfile, "a")

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # action 'H' forces a prompt (with help) on the first row
                action = "H"
                for r in rows:
                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    if mask:
                        f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        f.mask = workscan._getmask(r)

                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        #if accept_fit.upper() == "N":
                        #    #workscan._append_blinfo(None, None, None)
                        #    continue
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum((f.fitter.getresidual()
                                           if getresidual else f.fitter.getfit()), r)

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile:
                    blf.close()
            else:
                # non-interactive: the whole job is done in the C++ layer
                workscan._poly_baseline(mask, order, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, scsvformat+blfile)

            workscan._add_history("poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3370
3371 @asaplog_post_dec
    def auto_poly_baseline(self, mask=None, order=None, edge=None,
                           threshold=None, chan_avg_limit=None,
                           plot=None, insitu=None,
                           getresidual=None, showprogress=None,
                           minnrow=None, outlog=None, blfile=None, csvformat=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.
        Spectral lines are detected first using linefinder and masked out
        to avoid them affecting the baseline solution.

        Parameters:
            insitu: if False a new scantable is returned.
                    Otherwise, the scaling is done in-situ
                    The default is taken from .asaprc (False)
            mask: an optional mask retreived from scantable
            order: the order of the polynomial (default is 0)
            edge: an optional number of channel to drop at
                  the edge of spectrum. If only one value is
                  specified, the same number will be dropped
                  from both sides of the spectrum. Default
                  is to keep all channels. Nested tuples
                  represent individual edge selection for
                  different IFs (a number of spectral channels
                  can be different)
            threshold: the threshold used by line finder. It is
                       better to keep it large as only strong lines
                       affect the baseline solution.
            chan_avg_limit: a maximum number of consequtive spectral
                            channels to average during the search of
                            weak and broad lines. The default is no
                            averaging (and no search for weak lines).
                            If such lines can affect the fitted baseline
                            (e.g. a high order polynomial is fitted),
                            increase this parameter (usually values up
                            to 8 are reasonable). Most users of this
                            method should find the default value sufficient.
            plot: plot the fit and the residual. In this each
                  indivual fit has to be approved, by typing 'y'
                  or 'n'
            getresidual: if False, returns best-fit values instead of
                         residual. (default is True)
            showprogress: show progress status for large data.
                          default is True.
            minnrow: minimum number of input spectra to show.
                     default is 1000.
            outlog: Output the coefficients of the best-fit
                    function to logger (default is False)
            blfile: Name of a text file in which the best-fit
                    parameter values to be written
                    (default is "": no file/logger output)
            csvformat: if True blfile is csv-formatted, default is False.

        Example:
            bscan = scan.auto_poly_baseline(order=7, insitu=False)
        """

        try:
            varlist = vars()

            if insitu is None:
                insitu = rcParams['insitu']
            if insitu:
                workscan = self
            else:
                workscan = self.copy()

            #if mask is None: mask = [True for i in xrange(workscan.nchan())]
            if mask is None: mask = []
            if order is None: order = 0
            if edge is None: edge = (0, 0)
            if threshold is None: threshold = 3
            if chan_avg_limit is None: chan_avg_limit = 1
            if plot is None: plot = False
            if getresidual is None: getresidual = True
            if showprogress is None: showprogress = True
            if minnrow is None: minnrow = 1000
            if outlog is None: outlog = False
            if blfile is None: blfile = ''
            if csvformat is None: csvformat = False

            # the csv flag travels to the C++ layer as a 'T'/'F' character
            # prepended to the blfile name
            if csvformat:
                scsvformat = "T"
            else:
                scsvformat = "F"

            # expand edge spec into a flat per-IF (start, end) list
            edge = normalise_edge_param(edge)

            if plot:
                # interactive mode: line-find and fit row by row in Python
                # so each fit can be inspected and accepted/rejected
                outblfile = (blfile != "") and \
                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
                if outblfile: blf = open(blfile, "a")

                from asap.asaplinefind import linefinder
                fl = linefinder()
                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
                fl.set_scan(workscan)

                f = fitter()
                f.set_function(lpoly=order)

                rows = xrange(workscan.nrow())
                #if len(rows) > 0: workscan._init_blinfo()

                # action 'H' forces a prompt (with help) on the first row
                action = "H"
                for r in rows:
                    # each IF occupies two consecutive entries in 'edge'
                    idx = 2*workscan.getif(r)
                    if mask:
                        msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
                    else: # mask=None
                        msk = workscan._getmask(r)
                    fl.find_lines(r, msk, edge[idx:idx+2])

                    f.x = workscan._getabcissa(r)
                    f.y = workscan._getspectrum(r)
                    f.mask = fl.get_mask()
                    f.data = None
                    f.fit()

                    if action != "Y": # skip plotting when accepting all
                        f.plot(residual=True)
                        #accept_fit = raw_input("Accept fit ( [y]/n ): ")
                        accept_fit = self._get_verify_action("Accept fit?",action)
                        if r == 0: action = None
                        if accept_fit.upper() == "N":
                            #workscan._append_blinfo(None, None, None)
                            continue
                        elif accept_fit.upper() == "R":
                            break
                        elif accept_fit.upper() == "A":
                            action = "Y"

                    blpars = f.get_parameters()
                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
                    #workscan._append_blinfo(blpars, masklist, f.mask)
                    workscan._setspectrum(
                        (f.fitter.getresidual() if getresidual
                         else f.fitter.getfit()), r
                        )

                    if outblfile:
                        rms = workscan.get_rms(f.mask, r)
                        dataout = \
                            workscan.format_blparams_row(blpars["params"],
                                                         blpars["fixed"],
                                                         rms, str(masklist),
                                                         r, True, csvformat)
                        blf.write(dataout)

                f._p.unmap()
                f._p = None

                if outblfile: blf.close()
            else:
                # non-interactive: the whole job is done in the C++ layer
                workscan._auto_poly_baseline(mask, order, edge, threshold,
                                             chan_avg_limit, getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, scsvformat+blfile)

            workscan._add_history("auto_poly_baseline", varlist)

            if insitu:
                self._assign(workscan)
            else:
                return workscan

        except RuntimeError, e:
            raise_fitting_failure_exception(e)
3540
3541 def _init_blinfo(self):
3542 """\
3543 Initialise the following three auxiliary members:
3544 blpars : parameters of the best-fit baseline,
3545 masklists : mask data (edge positions of masked channels) and
3546 actualmask : mask data (in boolean list),
3547 to keep for use later (including output to logger/text files).
3548 Used by poly_baseline() and auto_poly_baseline() in case of
3549 'plot=True'.
3550 """
3551 self.blpars = []
3552 self.masklists = []
3553 self.actualmask = []
3554 return
3555
3556 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3557 """\
3558 Append baseline-fitting related info to blpars, masklist and
3559 actualmask.
3560 """
3561 self.blpars.append(data_blpars)
3562 self.masklists.append(data_masklists)
3563 self.actualmask.append(data_actualmask)
3564 return
3565
3566 @asaplog_post_dec
3567 def rotate_linpolphase(self, angle):
3568 """\
3569 Rotate the phase of the complex polarization O=Q+iU correlation.
3570 This is always done in situ in the raw data. So if you call this
3571 function more than once then each call rotates the phase further.
3572
3573 Parameters:
3574
3575 angle: The angle (degrees) to rotate (add) by.
3576
3577 Example::
3578
3579 scan.rotate_linpolphase(2.3)
3580
3581 """
3582 varlist = vars()
3583 self._math._rotate_linpolphase(self, angle)
3584 self._add_history("rotate_linpolphase", varlist)
3585 return
3586
3587 @asaplog_post_dec
3588 def rotate_xyphase(self, angle):
3589 """\
3590 Rotate the phase of the XY correlation. This is always done in situ
3591 in the data. So if you call this function more than once
3592 then each call rotates the phase further.
3593
3594 Parameters:
3595
3596 angle: The angle (degrees) to rotate (add) by.
3597
3598 Example::
3599
3600 scan.rotate_xyphase(2.3)
3601
3602 """
3603 varlist = vars()
3604 self._math._rotate_xyphase(self, angle)
3605 self._add_history("rotate_xyphase", varlist)
3606 return
3607
3608 @asaplog_post_dec
3609 def swap_linears(self):
3610 """\
3611 Swap the linear polarisations XX and YY, or better the first two
3612 polarisations as this also works for ciculars.
3613 """
3614 varlist = vars()
3615 self._math._swap_linears(self)
3616 self._add_history("swap_linears", varlist)
3617 return
3618
3619 @asaplog_post_dec
3620 def invert_phase(self):
3621 """\
3622 Invert the phase of the complex polarisation
3623 """
3624 varlist = vars()
3625 self._math._invert_phase(self)
3626 self._add_history("invert_phase", varlist)
3627 return
3628
3629 @asaplog_post_dec
3630 def add(self, offset, insitu=None):
3631 """\
3632 Return a scan where all spectra have the offset added
3633
3634 Parameters:
3635
3636 offset: the offset
3637
3638 insitu: if False a new scantable is returned.
3639 Otherwise, the scaling is done in-situ
3640 The default is taken from .asaprc (False)
3641
3642 """
3643 if insitu is None: insitu = rcParams['insitu']
3644 self._math._setinsitu(insitu)
3645 varlist = vars()
3646 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3647 s._add_history("add", varlist)
3648 if insitu:
3649 self._assign(s)
3650 else:
3651 return s
3652
3653 @asaplog_post_dec
3654 def scale(self, factor, tsys=True, insitu=None):
3655 """\
3656
3657 Return a scan where all spectra are scaled by the given 'factor'
3658
3659 Parameters:
3660
3661 factor: the scaling factor (float or 1D float list)
3662
3663 insitu: if False a new scantable is returned.
3664 Otherwise, the scaling is done in-situ
3665 The default is taken from .asaprc (False)
3666
3667 tsys: if True (default) then apply the operation to Tsys
3668 as well as the data
3669
3670 """
3671 if insitu is None: insitu = rcParams['insitu']
3672 self._math._setinsitu(insitu)
3673 varlist = vars()
3674 s = None
3675 import numpy
3676 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3677 if isinstance(factor[0], list) or isinstance(factor[0],
3678 numpy.ndarray):
3679 from asapmath import _array2dOp
3680 s = _array2dOp( self, factor, "MUL", tsys, insitu )
3681 else:
3682 s = scantable( self._math._arrayop( self, factor,
3683 "MUL", tsys ) )
3684 else:
3685 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
3686 s._add_history("scale", varlist)
3687 if insitu:
3688 self._assign(s)
3689 else:
3690 return s
3691
3692 @preserve_selection
3693 def set_sourcetype(self, match, matchtype="pattern",
3694 sourcetype="reference"):
3695 """\
3696 Set the type of the source to be an source or reference scan
3697 using the provided pattern.
3698
3699 Parameters:
3700
3701 match: a Unix style pattern, regular expression or selector
3702
3703 matchtype: 'pattern' (default) UNIX style pattern or
3704 'regex' regular expression
3705
3706 sourcetype: the type of the source to use (source/reference)
3707
3708 """
3709 varlist = vars()
3710 stype = -1
3711 if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
3712 stype = 1
3713 elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
3714 stype = 0
3715 else:
3716 raise ValueError("Illegal sourcetype use s(ource)/on or r(eference)/off")
3717 if matchtype.lower().startswith("p"):
3718 matchtype = "pattern"
3719 elif matchtype.lower().startswith("r"):
3720 matchtype = "regex"
3721 else:
3722 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3723 sel = selector()
3724 if isinstance(match, selector):
3725 sel = match
3726 else:
3727 sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
3728 self.set_selection(sel)
3729 self._setsourcetype(stype)
3730 self._add_history("set_sourcetype", varlist)
3731
3732 @asaplog_post_dec
3733 @preserve_selection
3734 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3735 """\
3736 This function allows to build quotients automatically.
3737 It assumes the observation to have the same number of
3738 "ons" and "offs"
3739
3740 Parameters:
3741
3742 preserve: you can preserve (default) the continuum or
3743 remove it. The equations used are
3744
3745 preserve: Output = Toff * (on/off) - Toff
3746
3747 remove: Output = Toff * (on/off) - Ton
3748
3749 mode: the on/off detection mode
3750 'paired' (default)
3751 identifies 'off' scans by the
3752 trailing '_R' (Mopra/Parkes) or
3753 '_e'/'_w' (Tid) and matches
3754 on/off pairs from the observing pattern
3755 'time'
3756 finds the closest off in time
3757
3758 .. todo:: verify argument is not implemented
3759
3760 """
3761 varlist = vars()
3762 modes = ["time", "paired"]
3763 if not mode in modes:
3764 msg = "please provide valid mode. Valid modes are %s" % (modes)
3765 raise ValueError(msg)
3766 s = None
3767 if mode.lower() == "paired":
3768 sel = self.get_selection()
3769 sel.set_query("SRCTYPE==psoff")
3770 self.set_selection(sel)
3771 offs = self.copy()
3772 sel.set_query("SRCTYPE==pson")
3773 self.set_selection(sel)
3774 ons = self.copy()
3775 s = scantable(self._math._quotient(ons, offs, preserve))
3776 elif mode.lower() == "time":
3777 s = scantable(self._math._auto_quotient(self, mode, preserve))
3778 s._add_history("auto_quotient", varlist)
3779 return s
3780
3781 @asaplog_post_dec
3782 def mx_quotient(self, mask = None, weight='median', preserve=True):
3783 """\
3784 Form a quotient using "off" beams when observing in "MX" mode.
3785
3786 Parameters:
3787
3788 mask: an optional mask to be used when weight == 'stddev'
3789
3790 weight: How to average the off beams. Default is 'median'.
3791
3792 preserve: you can preserve (default) the continuum or
3793 remove it. The equations used are:
3794
3795 preserve: Output = Toff * (on/off) - Toff
3796
3797 remove: Output = Toff * (on/off) - Ton
3798
3799 """
3800 mask = mask or ()
3801 varlist = vars()
3802 on = scantable(self._math._mx_extract(self, 'on'))
3803 preoff = scantable(self._math._mx_extract(self, 'off'))
3804 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3805 from asapmath import quotient
3806 q = quotient(on, off, preserve)
3807 q._add_history("mx_quotient", varlist)
3808 return q
3809
3810 @asaplog_post_dec
3811 def freq_switch(self, insitu=None):
3812 """\
3813 Apply frequency switching to the data.
3814
3815 Parameters:
3816
3817 insitu: if False a new scantable is returned.
3818 Otherwise, the swictching is done in-situ
3819 The default is taken from .asaprc (False)
3820
3821 """
3822 if insitu is None: insitu = rcParams['insitu']
3823 self._math._setinsitu(insitu)
3824 varlist = vars()
3825 s = scantable(self._math._freqswitch(self))
3826 s._add_history("freq_switch", varlist)
3827 if insitu:
3828 self._assign(s)
3829 else:
3830 return s
3831
3832 @asaplog_post_dec
3833 def recalc_azel(self):
3834 """Recalculate the azimuth and elevation for each position."""
3835 varlist = vars()
3836 self._recalcazel()
3837 self._add_history("recalc_azel", varlist)
3838 return
3839
3840 @asaplog_post_dec
3841 def __add__(self, other):
3842 """
3843 implicit on all axes and on Tsys
3844 """
3845 varlist = vars()
3846 s = self.__op( other, "ADD" )
3847 s._add_history("operator +", varlist)
3848 return s
3849
3850 @asaplog_post_dec
3851 def __sub__(self, other):
3852 """
3853 implicit on all axes and on Tsys
3854 """
3855 varlist = vars()
3856 s = self.__op( other, "SUB" )
3857 s._add_history("operator -", varlist)
3858 return s
3859
3860 @asaplog_post_dec
3861 def __mul__(self, other):
3862 """
3863 implicit on all axes and on Tsys
3864 """
3865 varlist = vars()
3866 s = self.__op( other, "MUL" ) ;
3867 s._add_history("operator *", varlist)
3868 return s
3869
3870
3871 @asaplog_post_dec
3872 def __div__(self, other):
3873 """
3874 implicit on all axes and on Tsys
3875 """
3876 varlist = vars()
3877 s = self.__op( other, "DIV" )
3878 s._add_history("operator /", varlist)
3879 return s
3880
3881 @asaplog_post_dec
3882 def __op( self, other, mode ):
3883 s = None
3884 if isinstance(other, scantable):
3885 s = scantable(self._math._binaryop(self, other, mode))
3886 elif isinstance(other, float):
3887 if other == 0.0:
3888 raise ZeroDivisionError("Dividing by zero is not recommended")
3889 s = scantable(self._math._unaryop(self, other, mode, False))
3890 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3891 if isinstance(other[0], list) \
3892 or isinstance(other[0], numpy.ndarray):
3893 from asapmath import _array2dOp
3894 s = _array2dOp( self, other, mode, False )
3895 else:
3896 s = scantable( self._math._arrayop( self, other,
3897 mode, False ) )
3898 else:
3899 raise TypeError("Other input is not a scantable or float value")
3900 return s
3901
3902 @asaplog_post_dec
3903 def get_fit(self, row=0):
3904 """\
3905 Print or return the stored fits for a row in the scantable
3906
3907 Parameters:
3908
3909 row: the row which the fit has been applied to.
3910
3911 """
3912 if row > self.nrow():
3913 return
3914 from asap.asapfit import asapfit
3915 fit = asapfit(self._getfit(row))
3916 asaplog.push( '%s' %(fit) )
3917 return fit.as_dict()
3918
3919 @preserve_selection
3920 def flag_nans(self):
3921 """\
3922 Utility function to flag NaN values in the scantable.
3923 """
3924 import numpy
3925 basesel = self.get_selection()
3926 for i in range(self.nrow()):
3927 sel = self.get_row_selector(i)
3928 self.set_selection(basesel+sel)
3929 nans = numpy.isnan(self._getspectrum(0))
3930 if numpy.any(nans):
3931 bnans = [ bool(v) for v in nans]
3932 self.flag(bnans)
3933
3934 def get_row_selector(self, rowno):
3935 return selector(rows=[rowno])
3936
3937 def _add_history(self, funcname, parameters):
3938 if not rcParams['scantable.history']:
3939 return
3940 # create date
3941 sep = "##"
3942 from datetime import datetime
3943 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3944 hist = dstr+sep
3945 hist += funcname+sep#cdate+sep
3946 if parameters.has_key('self'):
3947 del parameters['self']
3948 for k, v in parameters.iteritems():
3949 if type(v) is dict:
3950 for k2, v2 in v.iteritems():
3951 hist += k2
3952 hist += "="
3953 if isinstance(v2, scantable):
3954 hist += 'scantable'
3955 elif k2 == 'mask':
3956 if isinstance(v2, list) or isinstance(v2, tuple):
3957 hist += str(self._zip_mask(v2))
3958 else:
3959 hist += str(v2)
3960 else:
3961 hist += str(v2)
3962 else:
3963 hist += k
3964 hist += "="
3965 if isinstance(v, scantable):
3966 hist += 'scantable'
3967 elif k == 'mask':
3968 if isinstance(v, list) or isinstance(v, tuple):
3969 hist += str(self._zip_mask(v))
3970 else:
3971 hist += str(v)
3972 else:
3973 hist += str(v)
3974 hist += sep
3975 hist = hist[:-2] # remove trailing '##'
3976 self._addhistory(hist)
3977
3978
3979 def _zip_mask(self, mask):
3980 mask = list(mask)
3981 i = 0
3982 segments = []
3983 while mask[i:].count(1):
3984 i += mask[i:].index(1)
3985 if mask[i:].count(0):
3986 j = i + mask[i:].index(0)
3987 else:
3988 j = len(mask)
3989 segments.append([i, j])
3990 i = j
3991 return segments
3992
3993 def _get_ordinate_label(self):
3994 fu = "("+self.get_fluxunit()+")"
3995 import re
3996 lbl = "Intensity"
3997 if re.match(".K.", fu):
3998 lbl = "Brightness Temperature "+ fu
3999 elif re.match(".Jy.", fu):
4000 lbl = "Flux density "+ fu
4001 return lbl
4002
4003 def _check_ifs(self):
4004# return len(set([self.nchan(i) for i in self.getifnos()])) == 1
4005 nchans = [self.nchan(i) for i in self.getifnos()]
4006 nchans = filter(lambda t: t > 0, nchans)
4007 return (sum(nchans)/len(nchans) == nchans[0])
4008
4009 @asaplog_post_dec
4010 def _fill(self, names, unit, average, opts={}):
4011 first = True
4012 fullnames = []
4013 for name in names:
4014 name = os.path.expandvars(name)
4015 name = os.path.expanduser(name)
4016 if not os.path.exists(name):
4017 msg = "File '%s' does not exists" % (name)
4018 raise IOError(msg)
4019 fullnames.append(name)
4020 if average:
4021 asaplog.push('Auto averaging integrations')
4022 stype = int(rcParams['scantable.storage'].lower() == 'disk')
4023 for name in fullnames:
4024 tbl = Scantable(stype)
4025 if is_ms( name ):
4026 r = msfiller( tbl )
4027 else:
4028 r = filler( tbl )
4029 msg = "Importing %s..." % (name)
4030 asaplog.push(msg, False)
4031 r.open(name, opts)
4032 rx = rcParams['scantable.reference']
4033 r.setreferenceexpr(rx)
4034 r.fill()
4035 if average:
4036 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
4037 if not first:
4038 tbl = self._math._merge([self, tbl])
4039 Scantable.__init__(self, tbl)
4040 r.close()
4041 del r, tbl
4042 first = False
4043 #flush log
4044 asaplog.post()
4045 if unit is not None:
4046 self.set_fluxunit(unit)
4047 if not is_casapy():
4048 self.set_freqframe(rcParams['scantable.freqframe'])
4049
    def _get_verify_action( self, msg, action=None ):
        """\
        Resolve an interactive verification choice for *msg*.

        Parameters:

            msg: prompt text shown to the user

            action: optional preset answer; if falsy or not a string the
                    user is prompted via raw_input

        Returns one of 'Y' (yes), 'N' (no), 'A' (accept all), 'R'
        (reject all). An empty reply defaults to 'Y'; 'H'/'?' prints help
        and prompts again; any other input falls back to 'Y'.
        """
        valid_act = ['Y', 'N', 'A', 'R']
        if not action or not isinstance(action, str):
            action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
        if action == '':
            # bare Enter accepts the default
            return "Y"
        elif (action.upper()[0] in valid_act):
            return action.upper()[0]
        elif (action.upper()[0] in ['H','?']):
            print "Available actions of verification [Y|n|a|r]"
            print " Y : Yes for current data (default)"
            print " N : No for current data"
            print " A : Accept all in the following and exit from verification"
            print " R : Reject all in the following and exit from verification"
            print " H or ?: help (show this message)"
            # re-prompt without a preset action
            return self._get_verify_action(msg)
        else:
            # unrecognised input silently falls back to the default
            return 'Y'
4068
4069 def __getitem__(self, key):
4070 if key < 0:
4071 key += self.nrow()
4072 if key >= self.nrow():
4073 raise IndexError("Row index out of range.")
4074 return self._getspectrum(key)
4075
4076 def __setitem__(self, key, value):
4077 if key < 0:
4078 key += self.nrow()
4079 if key >= self.nrow():
4080 raise IndexError("Row index out of range.")
4081 if not hasattr(value, "__len__") or \
4082 len(value) > self.nchan(self.getif(key)):
4083 raise ValueError("Spectrum length doesn't match.")
4084 return self._setspectrum(value, key)
4085
4086 def __len__(self):
4087 return self.nrow()
4088
4089 def __iter__(self):
4090 for i in range(len(self)):
4091 yield self[i]
Note: See TracBrowser for help on using the repository browser.