source: trunk/python/scantable.py @ 2178

Last change on this file since 2178 was 2178, checked in by Malte Marquarding, 14 years ago

Removed redundant argument 'verbose'

1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6 from functools import wraps as wraps_dec
7except ImportError:
8 from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
22def preserve_selection(func):
23 @wraps_dec(func)
24 def wrap(obj, *args, **kw):
25 basesel = obj.get_selection()
26 try:
27 val = func(obj, *args, **kw)
28 finally:
29 obj.set_selection(basesel)
30 return val
31 return wrap
32
33def is_scantable(filename):
34 """Is the given file a scantable?
35
36 Parameters:
37
38 filename: the name of the file/directory to test
39
40 """
41 if ( os.path.isdir(filename)
42 and os.path.exists(filename+'/table.info')
43 and os.path.exists(filename+'/table.dat') ):
44 f=open(filename+'/table.info')
45 l=f.readline()
46 f.close()
47 #if ( l.find('Scantable') != -1 ):
48 if ( l.find('Measurement Set') == -1 ):
49 return True
50 else:
51 return False
52 else:
53 return False
54## return (os.path.isdir(filename)
55## and not os.path.exists(filename+'/table.f1')
56## and os.path.exists(filename+'/table.info'))
57
58def is_ms(filename):
59 """Is the given file a MeasurementSet?
60
61 Parameters:
62
63 filename: the name of the file/directory to test
64
65 """
66 if ( os.path.isdir(filename)
67 and os.path.exists(filename+'/table.info')
68 and os.path.exists(filename+'/table.dat') ):
69 f=open(filename+'/table.info')
70 l=f.readline()
71 f.close()
72 if ( l.find('Measurement Set') != -1 ):
73 return True
74 else:
75 return False
76 else:
77 return False
78
79class scantable(Scantable):
80 """\
81 The ASAP container for scans (single-dish data).
82 """
83
84 @asaplog_post_dec
85 #def __init__(self, filename, average=None, unit=None, getpt=None,
86 # antenna=None, parallactify=None):
87 def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
88 """\
89 Create a scantable from a saved one or make a reference
90
91 Parameters:
92
93 filename: the name of an asap table on disk
94 or
95 the name of a rpfits/sdfits/ms file
96 (integrations within scans are auto averaged
97 and the whole file is read) or
98 [advanced] a reference to an existing scantable
99
100 average: average all integrations within a scan on read.
101 The default (True) is taken from .asaprc.
102
103 unit: brightness unit; must be consistent with K or Jy.
104 Overrides the default selected by the filler
105 (input rpfits/sdfits/ms) or replaces the value
106 in existing scantables
107
108 getpt: for MeasurementSet input data only:
109 If True, all pointing data are filled.
110 The default is False, which can make loading
111 the MS data faster in some cases.
112
113 antenna: for MeasurementSet input data only:
114 Antenna selection. integer (id) or string (name or id).
115
116 parallactify: Indicate that the data has been parallactified. The default
117 is taken from the .asaprc file.
118
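A minimal usage sketch (the file names below are illustrative;
'getpt' and 'antenna' only apply to MeasurementSet input)::

 s = scantable('mydata.rpf', average=True)
 # MeasurementSet input, filling pointing data for antenna 0
 m = scantable('mydata.ms', average=False, getpt=True, antenna=0)
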
119 """
120 if average is None:
121 average = rcParams['scantable.autoaverage']
122 #if getpt is None:
123 # getpt = True
124 #if antenna is not None:
125 # asaplog.push("Antenna selection currently unsupported."
126 # "Using ''")
127 # asaplog.post('WARN')
128 #if antenna is None:
129 # antenna = ''
130 #elif type(antenna) == int:
131 # antenna = '%s' % antenna
132 #elif type(antenna) == list:
133 # tmpstr = ''
134 # for i in range( len(antenna) ):
135 # if type(antenna[i]) == int:
136 # tmpstr = tmpstr + ('%s,'%(antenna[i]))
137 # elif type(antenna[i]) == str:
138 # tmpstr=tmpstr+antenna[i]+','
139 # else:
140 # raise TypeError('Bad antenna selection.')
141 # antenna = tmpstr.rstrip(',')
142 parallactify = parallactify or rcParams['scantable.parallactify']
143 varlist = vars()
144 from asap._asap import stmath
145 self._math = stmath( rcParams['insitu'] )
146 if isinstance(filename, Scantable):
147 Scantable.__init__(self, filename)
148 else:
149 if isinstance(filename, str):
150 filename = os.path.expandvars(filename)
151 filename = os.path.expanduser(filename)
152 if not os.path.exists(filename):
153 s = "File '%s' not found." % (filename)
154 raise IOError(s)
155 if is_scantable(filename):
156 ondisk = rcParams['scantable.storage'] == 'disk'
157 Scantable.__init__(self, filename, ondisk)
158 if unit is not None:
159 self.set_fluxunit(unit)
160 if average:
161 self._assign( self.average_time( scanav=True ) )
162 # do not reset to the default freqframe
163 #self.set_freqframe(rcParams['scantable.freqframe'])
164 #elif os.path.isdir(filename) \
165 # and not os.path.exists(filename+'/table.f1'):
166 elif is_ms(filename):
167 # Measurement Set
168 opts={'ms': {}}
169 mskeys=['getpt','antenna']
170 for key in mskeys:
171 if key in args.keys():
172 opts['ms'][key] = args[key]
173 #self._fill([filename], unit, average, getpt, antenna)
174 self._fill([filename], unit, average, opts)
175 elif os.path.isfile(filename):
176 #self._fill([filename], unit, average, getpt, antenna)
177 self._fill([filename], unit, average)
178 else:
179 msg = "The given file '%s'is not a valid " \
180 "asap table." % (filename)
181 raise IOError(msg)
182 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
183 and isinstance(filename[-1], str):
184 #self._fill(filename, unit, average, getpt, antenna)
185 self._fill(filename, unit, average)
186 self.parallactify(parallactify)
187 self._add_history("scantable", varlist)
188
189 @asaplog_post_dec
190 def save(self, name=None, format=None, overwrite=False):
191 """\
192 Store the scantable on disk. This can be an asap (aips++) Table,
193 SDFITS or MS2 format.
194
195 Parameters:
196
197 name: the name of the output file. For format "ASCII"
198 this is the root file name (data in 'name'.txt
199 and header in 'name'_header.txt)
200
201 format: an optional file format. Default is ASAP.
202 Allowed are:
203
204 * 'ASAP' (save as ASAP [aips++] Table),
205 * 'SDFITS' (save as SDFITS file)
206 * 'ASCII' (saves as ascii text file)
207 * 'MS2' (saves as a casacore MeasurementSet V2)
208 * 'FITS' (save as image FITS - not readable by CLASS)
209 * 'CLASS' (save as FITS readable by CLASS)
210
211 overwrite: whether the file should be overwritten if it exists.
212 The default (False) is to return with a warning
213 without writing the output. USE WITH CARE.
214
215 Example::
216
217 scan.save('myscan.asap')
218 scan.save('myscan.sdfits', 'SDFITS')
219
220 """
221 from os import path
222 format = format or rcParams['scantable.save']
223 suffix = '.'+format.lower()
224 if name is None or name == "":
225 name = 'scantable'+suffix
226 msg = "No filename given. Using default name %s..." % name
227 asaplog.push(msg)
228 name = path.expandvars(name)
229 if path.isfile(name) or path.isdir(name):
230 if not overwrite:
231 msg = "File %s exists." % name
232 raise IOError(msg)
233 format2 = format.upper()
234 if format2 == 'ASAP':
235 self._save(name)
236 elif format2 == 'MS2':
237 msopt = {'ms': {'overwrite': overwrite } }
238 from asap._asap import mswriter
239 writer = mswriter( self )
240 writer.write( name, msopt )
241 else:
242 from asap._asap import stwriter as stw
243 writer = stw(format2)
244 writer.write(self, name)
245 return
246
247 def copy(self):
248 """Return a copy of this scantable.
249
250 *Note*:
251
252 This makes a full (deep) copy. scan2 = scan1 makes a reference.
253
254 Example::
255
256 copiedscan = scan.copy()
257
258 """
259 sd = scantable(Scantable._copy(self))
260 return sd
261
262 def drop_scan(self, scanid=None):
263 """\
264 Return a new scantable where the specified scan number(s) has(have)
265 been dropped.
266
267 Parameters:
268
269 scanid: a (list of) scan number(s)
270
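A usage sketch (assumes 'scan' is an existing scantable; the scan
numbers are illustrative)::

 cleaned = scan.drop_scan([2, 4]) # drop scans 2 and 4
 cleaned = scan.drop_scan(7) # a single scanno also works
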
271 """
272 from asap import _is_sequence_or_number as _is_valid
273 from asap import _to_list
274 from asap import unique
275 if not _is_valid(scanid):
276 raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
277 scanid = _to_list(scanid)
278 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
279 for sid in scanid: allscans.remove(sid)
280 if len(allscans) == 0:
281 raise ValueError("Can't remove all scans")
282 sel = selector(scans=allscans)
283 return self._select_copy(sel)
284
285 def _select_copy(self, selection):
286 orig = self.get_selection()
287 self.set_selection(orig+selection)
288 cp = self.copy()
289 self.set_selection(orig)
290 return cp
291
292 def get_scan(self, scanid=None):
293 """\
294 Return a specific scan (by scanno) or collection of scans (by
295 source name) in a new scantable.
296
297 *Note*:
298
299 See scantable.drop_scan() for the inverse operation.
300
301 Parameters:
302
303 scanid: a (list of) scanno or a source name, unix-style
304 patterns are accepted for source name matching, e.g.
305 '*_R' gets all 'ref' scans
306
307 Example::
308
309 # get all scans containing the source '323p459'
310 newscan = scan.get_scan('323p459')
311 # get all 'off' scans
312 refscans = scan.get_scan('*_R')
313 # get a subset of scans by scanno (as listed in scan.summary())
314 newscan = scan.get_scan([0, 2, 7, 10])
315
316 """
317 if scanid is None:
318 raise RuntimeError( 'Please specify a scan no or name to '
319 'retrieve from the scantable' )
320 try:
321 bsel = self.get_selection()
322 sel = selector()
323 if type(scanid) is str:
324 sel.set_name(scanid)
325 return self._select_copy(sel)
326 elif type(scanid) is int:
327 sel.set_scans([scanid])
328 return self._select_copy(sel)
329 elif type(scanid) is list:
330 sel.set_scans(scanid)
331 return self._select_copy(sel)
332 else:
333 msg = "Illegal scanid type, use 'int' or 'list' of ints."
334 raise TypeError(msg)
335 except RuntimeError:
336 raise
337
338 def __str__(self):
339 return Scantable._summary(self)
340
341 def summary(self, filename=None):
342 """\
343 Print a summary of the contents of this scantable.
344
345 Parameters:
346
347 filename: the name of a file to write the output to
348 Default - no file output
349
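Example (the file name is illustrative)::

 scan.summary() # page the summary to screen
 scan.summary('obs_summary.txt') # also write it to a text file
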
350 """
351 info = Scantable._summary(self)
352 if filename is not None:
353 if filename == "":
354 filename = 'scantable_summary.txt'
355 from os.path import expandvars, isdir
356 filename = expandvars(filename)
357 if not isdir(filename):
358 data = open(filename, 'w')
359 data.write(info)
360 data.close()
361 else:
362 msg = "Illegal file name '%s'." % (filename)
363 raise IOError(msg)
364 return page(info)
365
366 def get_spectrum(self, rowno):
367 """Return the spectrum for the current row in the scantable as a list.
368
369 Parameters:
370
371 rowno: the row number to retrieve the spectrum from
372
373 """
374 return self._getspectrum(rowno)
375
376 def get_mask(self, rowno):
377 """Return the mask for the current row in the scantable as a list.
378
379 Parameters:
380
381 rowno: the row number to retrieve the mask from
382
383 """
384 return self._getmask(rowno)
385
386 def set_spectrum(self, spec, rowno):
387 """Set the spectrum for the current row in the scantable.
388
389 Parameters:
390
391 spec: the new spectrum
392
393 rowno: the row number to set the spectrum for
394
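A usage sketch (row 0 is used for illustration)::

 spec = scan.get_spectrum(0)
 spec = [2.0 * v for v in spec] # scale row 0 by a factor of 2
 scan.set_spectrum(spec, 0)
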
395 """
396 assert(len(spec) == self.nchan())
397 return self._setspectrum(spec, rowno)
398
399 def get_coordinate(self, rowno):
400 """Return the (spectral) coordinate for a a given 'rowno'.
401
402 *Note*:
403
404 * This coordinate is only valid until a scantable method modifies
405 the frequency axis.
406 * This coordinate contains the original frequency set-up,
407 NOT the new frame. The conversions, however, are done using the user
408 specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
409 use scantable.freq_align first. Without it there is no closure,
410 i.e.::
411
412 c = myscan.get_coordinate(0)
413 c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
414
415 Parameters:
416
417 rowno: the row number for the spectral coordinate
418
419 """
420 return coordinate(Scantable.get_coordinate(self, rowno))
421
422 def get_selection(self):
423 """\
424 Get the selection object currently set on this scantable.
425
426 Example::
427
428 sel = scan.get_selection()
429 sel.set_ifs(0) # select IF 0
430 scan.set_selection(sel) # apply modified selection
431
432 """
433 return selector(self._getselection())
434
435 def set_selection(self, selection=None, **kw):
436 """\
437 Select a subset of the data. All following operations on this scantable
438 are only applied to this selection.
439
440 Parameters:
441
442 selection: a selector object (default unset the selection), or
443 any combination of "pols", "ifs", "beams", "scans",
444 "cycles", "name", "query"
445
446 Examples::
447
448 sel = selector() # create a selection object
449 sel.set_scans([0, 3]) # select SCANNO 0 and 3
450 scan.set_selection(sel) # set the selection
451 scan.summary() # will only print summary of scanno 0 and 3
452 scan.set_selection() # unset the selection
453 # or the equivalent
454 scan.set_selection(scans=[0,3])
455 scan.summary() # will only print summary of scanno 0 and 3
456 scan.set_selection() # unset the selection
457
458 """
459 if selection is None:
460 # reset
461 if len(kw) == 0:
462 selection = selector()
463 else:
464 # try keywords
465 for k in kw:
466 if k not in selector.fields:
467 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
468 selection = selector(**kw)
469 self._setselection(selection)
470
471 def get_row(self, row=0, insitu=None):
472 """\
473 Select a row in the scantable.
474 Return a scantable with single row.
475
476 Parameters:
477
478 row: row no of integration, default is 0.
479 insitu: if False a new scantable is returned. Otherwise, the
480 scaling is done in-situ. The default is taken from .asaprc
481 (False)
482
483 """
484 if insitu is None: insitu = rcParams['insitu']
485 if not insitu:
486 workscan = self.copy()
487 else:
488 workscan = self
489 # Select a row
490 sel=selector()
491 sel.set_rows([row])
492 #sel.set_scans([workscan.getscan(row)])
493 #sel.set_cycles([workscan.getcycle(row)])
494 #sel.set_beams([workscan.getbeam(row)])
495 #sel.set_ifs([workscan.getif(row)])
496 #sel.set_polarisations([workscan.getpol(row)])
497 #sel.set_name(workscan._getsourcename(row))
498 workscan.set_selection(sel)
499 if not workscan.nrow() == 1:
500 msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
501 raise RuntimeError(msg)
502 del sel
503 if insitu:
504 self._assign(workscan)
505 else:
506 return workscan
507
508 @asaplog_post_dec
509 def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
510 """\
511 Determine the specified statistic of the current beam/if/pol
512 Takes a 'mask' as an optional parameter to specify which
513 channels should be excluded.
514
515 Parameters:
516
517 stat: 'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
518 'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
519
520 mask: an optional mask specifying where the statistic
521 should be determined.
522
523 form: format string to print statistic values
524
525 row: row number of spectrum to process.
526 (default is None: for all rows)
527
528 Example::
529 scan.set_unit('channel')
530 msk = scan.create_mask([100, 200], [500, 600])
531 scan.stats(stat='mean', mask=msk)
532
533 """
534 mask = mask or []
535 if not self._check_ifs():
536 raise ValueError("Cannot apply mask as the IFs have different "
537 "number of channels. Please use setselection() "
538 "to select individual IFs")
539 rtnabc = False
540 if stat.lower().endswith('_abc'): rtnabc = True
541 getchan = False
542 if stat.lower().startswith('min') or stat.lower().startswith('max'):
543 chan = self._math._minmaxchan(self, mask, stat)
544 getchan = True
545 statvals = []
546 if not rtnabc:
547 if row == None:
548 statvals = self._math._stats(self, mask, stat)
549 else:
550 statvals = self._math._statsrow(self, mask, stat, int(row))
551
552 #def cb(i):
553 # return statvals[i]
554
555 #return self._row_callback(cb, stat)
556
557 label=stat
558 #callback=cb
559 out = ""
560 #outvec = []
561 sep = '-'*50
562
563 if row == None:
564 rows = xrange(self.nrow())
565 elif isinstance(row, int):
566 rows = [ row ]
567
568 for i in rows:
569 refstr = ''
570 statunit= ''
571 if getchan:
572 qx, qy = self.chan2data(rowno=i, chan=chan[i])
573 if rtnabc:
574 statvals.append(qx['value'])
575 refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
576 statunit= '['+qx['unit']+']'
577 else:
578 refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
579
580 tm = self._gettime(i)
581 src = self._getsourcename(i)
582 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
583 out += 'Time[%s]:\n' % (tm)
584 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (self.getbeam(i))
585 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
586 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
587 #outvec.append(callback(i))
588 if len(rows) > 1:
589 # out += ('= %'+form) % (outvec[i]) +' '+refstr+'\n'
590 out += ('= %'+form) % (statvals[i]) +' '+refstr+'\n'
591 else:
592 # out += ('= %'+form) % (outvec[0]) +' '+refstr+'\n'
593 out += ('= %'+form) % (statvals[0]) +' '+refstr+'\n'
594 out += sep+"\n"
595
596 import os
597 if os.environ.has_key( 'USER' ):
598 usr = os.environ['USER']
599 else:
600 import commands
601 usr = commands.getoutput( 'whoami' )
602 tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
603 f = open(tmpfile,'w')
604 print >> f, sep
605 print >> f, ' %s %s' % (label, statunit)
606 print >> f, sep
607 print >> f, out
608 f.close()
609 f = open(tmpfile,'r')
610 x = f.readlines()
611 f.close()
612 asaplog.push(''.join(x), False)
613
614 return statvals
615
616 def chan2data(self, rowno=0, chan=0):
617 """\
618 Returns channel/frequency/velocity and spectral value
619 at an arbitrary row and channel in the scantable.
620
621 Parameters:
622
623 rowno: a row number in the scantable. Default is the
624 first row, i.e. rowno=0
625
626 chan: a channel in the scantable. Default is the first
627 channel, i.e. chan=0
628
629 """
630 if isinstance(rowno, int) and isinstance(chan, int):
631 qx = {'unit': self.get_unit(),
632 'value': self._getabcissa(rowno)[chan]}
633 qy = {'unit': self.get_fluxunit(),
634 'value': self._getspectrum(rowno)[chan]}
635 return qx, qy
636
637 def stddev(self, mask=None):
638 """\
639 Determine the standard deviation of the current beam/if/pol
640 Takes a 'mask' as an optional parameter to specify which
641 channels should be excluded.
642
643 Parameters:
644
645 mask: an optional mask specifying where the standard
646 deviation should be determined.
647
648 Example::
649
650 scan.set_unit('channel')
651 msk = scan.create_mask([100, 200], [500, 600])
652 scan.stddev(mask=msk)
653
654 """
655 return self.stats(stat='stddev', mask=mask);
656
657
658 def get_column_names(self):
659 """\
660 Return a list of column names, which can be used for selection.
661 """
662 return list(Scantable.get_column_names(self))
663
664 def get_tsys(self, row=-1):
665 """\
666 Return the System temperatures.
667
668 Parameters:
669
670 row: the rowno to get the information for. (default all rows)
671
672 Returns:
673
674 a list of Tsys values for the current selection
675
676 """
677 if row > -1:
678 return self._get_column(self._gettsys, row)
679 return self._row_callback(self._gettsys, "Tsys")
680
681
682 def get_weather(self, row=-1):
683 """\
684 Return the weather information.
685
686 Parameters:
687
688 row: the rowno to get the information for. (default all rows)
689
690 Returns:
691
692 a dict or list of dicts of values for the current selection
693
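A usage sketch (the printed values depend on the data)::

 w = scan.get_weather(row=0)
 print w['temperature'], w['pressure'], w['humidity']
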
694 """
695
696 values = self._get_column(self._get_weather, row)
697 if row > -1:
698 return {'temperature': values[0],
699 'pressure': values[1], 'humidity' : values[2],
700 'windspeed' : values[3], 'windaz' : values[4]
701 }
702 else:
703 out = []
704 for r in values:
705
706 out.append({'temperature': r[0],
707 'pressure': r[1], 'humidity' : r[2],
708 'windspeed' : r[3], 'windaz' : r[4]
709 })
710 return out
711
712 def _row_callback(self, callback, label):
713 out = ""
714 outvec = []
715 sep = '-'*50
716 for i in range(self.nrow()):
717 tm = self._gettime(i)
718 src = self._getsourcename(i)
719 out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
720 out += 'Time[%s]:\n' % (tm)
721 if self.nbeam(-1) > 1:
722 out += ' Beam[%d] ' % (self.getbeam(i))
723 if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
724 if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
725 outvec.append(callback(i))
726 out += '= %3.3f\n' % (outvec[i])
727 out += sep+'\n'
728
729 asaplog.push(sep)
730 asaplog.push(" %s" % (label))
731 asaplog.push(sep)
732 asaplog.push(out)
733 asaplog.post()
734 return outvec
735
736 def _get_column(self, callback, row=-1, *args):
737 """
738 """
739 if row == -1:
740 return [callback(i, *args) for i in range(self.nrow())]
741 else:
742 if 0 <= row < self.nrow():
743 return callback(row, *args)
744
745
746 def get_time(self, row=-1, asdatetime=False, prec=-1):
747 """\
748 Get a list of time stamps for the observations.
749 Return a datetime object or a string (default) for each
750 integration time stamp in the scantable.
751
752 Parameters:
753
754 row: row no of integration. Default -1 return all rows
755
756 asdatetime: return values as datetime objects rather than strings
757
758 prec: number of digits shown. Default -1 to automatic calculation.
759 Note this number equals the digits of MVTime,
760 i.e., 0<prec<3: dates with hh:: only,
761 <5: with hh:mm:, <7 or 0: with hh:mm:ss,
762 and >6: with hh:mm:ss.tt... (prec-6 t's added)
763
764 """
765 from datetime import datetime
766 if prec < 0:
767 # automagically set necessary precision +1
768 prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
769 prec = max(6, int(prec))
770 else:
771 prec = max(0, prec)
772 if asdatetime:
773 #precision can be 1 millisecond at max
774 prec = min(12, prec)
775
776 times = self._get_column(self._gettime, row, prec)
777 if not asdatetime:
778 return times
779 format = "%Y/%m/%d/%H:%M:%S.%f"
780 if prec < 7:
781 nsub = 1 + (((6-prec)/2) % 3)
782 substr = [".%f","%S","%M"]
783 for i in range(nsub):
784 format = format.replace(substr[i],"")
785 if isinstance(times, list):
786 return [datetime.strptime(i, format) for i in times]
787 else:
788 return datetime.strptime(times, format)
789
790
791 def get_inttime(self, row=-1):
792 """\
793 Get a list of integration times for the observations.
794 Return a time in seconds for each integration in the scantable.
795
796 Parameters:
797
798 row: row no of integration. Default -1 return all rows.
799
800 """
801 return self._get_column(self._getinttime, row)
802
803
804 def get_sourcename(self, row=-1):
805 """\
806 Get a list of source names for the observations.
807 Return a string for each integration in the scantable.
808 Parameters:
809
810 row: row no of integration. Default -1 return all rows.
811
812 """
813 return self._get_column(self._getsourcename, row)
814
815 def get_elevation(self, row=-1):
816 """\
817 Get a list of elevations for the observations.
818 Return a float for each integration in the scantable.
819
820 Parameters:
821
822 row: row no of integration. Default -1 return all rows.
823
824 """
825 return self._get_column(self._getelevation, row)
826
827 def get_azimuth(self, row=-1):
828 """\
829 Get a list of azimuths for the observations.
830 Return a float for each integration in the scantable.
831
832 Parameters:
833 row: row no of integration. Default -1 return all rows.
834
835 """
836 return self._get_column(self._getazimuth, row)
837
838 def get_parangle(self, row=-1):
839 """\
840 Get a list of parallactic angles for the observations.
841 Return a float for each integration in the scantable.
842
843 Parameters:
844
845 row: row no of integration. Default -1 return all rows.
846
847 """
848 return self._get_column(self._getparangle, row)
849
850 def get_direction(self, row=-1):
851 """
852 Get a list of Positions on the sky (direction) for the observations.
853 Return a string for each integration in the scantable.
854
855 Parameters:
856
857 row: row no of integration. Default -1 return all rows
858
859 """
860 return self._get_column(self._getdirection, row)
861
862 def get_directionval(self, row=-1):
863 """\
864 Get a list of Positions on the sky (direction) for the observations.
865 Return a float for each integration in the scantable.
866
867 Parameters:
868
869 row: row no of integration. Default -1 return all rows
870
871 """
872 return self._get_column(self._getdirectionvec, row)
873
874 @asaplog_post_dec
875 def set_unit(self, unit='channel'):
876 """\
877 Set the unit for all following operations on this scantable
878
879 Parameters:
880
881 unit: optional unit, default is 'channel'. Use one of '*Hz',
882 'km/s', 'channel' or equivalent ''
883
884 """
885 varlist = vars()
886 if unit in ['', 'pixel', 'channel']:
887 unit = ''
888 inf = list(self._getcoordinfo())
889 inf[0] = unit
890 self._setcoordinfo(inf)
891 self._add_history("set_unit", varlist)
892
893 @asaplog_post_dec
894 def set_instrument(self, instr):
895 """\
896 Set the instrument for subsequent processing.
897
898 Parameters:
899
900 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
901 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
902
903 """
904 self._setInstrument(instr)
905 self._add_history("set_instument", vars())
906
907 @asaplog_post_dec
908 def set_feedtype(self, feedtype):
909 """\
910 Overwrite the feed type, which might not be set correctly.
911
912 Parameters:
913
914 feedtype: 'linear' or 'circular'
915
916 """
917 self._setfeedtype(feedtype)
918 self._add_history("set_feedtype", vars())
919
920 @asaplog_post_dec
921 def set_doppler(self, doppler='RADIO'):
922 """\
923 Set the doppler for all following operations on this scantable.
924
925 Parameters:
926
927 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
928
929 """
930 varlist = vars()
931 inf = list(self._getcoordinfo())
932 inf[2] = doppler
933 self._setcoordinfo(inf)
934 self._add_history("set_doppler", vars())
935
936 @asaplog_post_dec
937 def set_freqframe(self, frame=None):
938 """\
939 Set the frame type of the Spectral Axis.
940
941 Parameters:
942
943 frame: an optional frame type, default 'LSRK'. Valid frames are:
944 'TOPO', 'LSRD', 'LSRK', 'BARY',
945 'GEO', 'GALACTO', 'LGROUP', 'CMB'
946
947 Example::
948
949 scan.set_freqframe('BARY')
950
951 """
952 frame = frame or rcParams['scantable.freqframe']
953 varlist = vars()
954 # "REST" is not implemented in casacore
955 #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
956 # 'GEO', 'GALACTO', 'LGROUP', 'CMB']
957 valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
958 'GEO', 'GALACTO', 'LGROUP', 'CMB']
959
960 if frame in valid:
961 inf = list(self._getcoordinfo())
962 inf[1] = frame
963 self._setcoordinfo(inf)
964 self._add_history("set_freqframe", varlist)
965 else:
966 msg = "Please specify a valid freq type. Valid types are:\n", valid
967 raise TypeError(msg)
968
969 @asaplog_post_dec
970 def set_dirframe(self, frame=""):
971 """\
972 Set the frame type of the Direction on the sky.
973
974 Parameters:
975
976 frame: an optional frame type, default ''. Valid frames are:
977 'J2000', 'B1950', 'GALACTIC'
978
979 Example::
980
981 scan.set_dirframe('GALACTIC')
982
983 """
984 varlist = vars()
985 Scantable.set_dirframe(self, frame)
986 self._add_history("set_dirframe", varlist)
987
988 def get_unit(self):
989 """\
990 Get the default unit set in this scantable
991
992 Returns:
993
994 A unit string
995
996 """
997 inf = self._getcoordinfo()
998 unit = inf[0]
999 if unit == '': unit = 'channel'
1000 return unit
1001
1002 @asaplog_post_dec
1003 def get_abcissa(self, rowno=0):
1004 """\
1005 Get the abcissa in the current coordinate setup for the currently
1006 selected Beam/IF/Pol
1007
1008 Parameters:
1009
1010 rowno: an optional row number in the scantable. Default is the
1011 first row, i.e. rowno=0
1012
1013 Returns:
1014
1015 The abcissa values and the format string (as a dictionary)
1016
1017 """
1018 abc = self._getabcissa(rowno)
1019 lbl = self._getabcissalabel(rowno)
1020 return abc, lbl
1021
1022 @asaplog_post_dec
1023 def flag(self, row=-1, mask=None, unflag=False):
1024 """\
1025 Flag the selected data using an optional channel mask.
1026
1027 Parameters:
1028
1029 row: an optional row number in the scantable.
1030 Default -1 flags all rows
1031
1032 mask: an optional channel mask, created with create_mask. Default
1033 (no mask) is all channels.
1034
1035 unflag: if True, unflag the data
1036
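A usage sketch (the channel ranges are illustrative)::

 scan.set_unit('channel')
 msk = scan.create_mask([0, 99]) # channels 0-99
 scan.flag(mask=msk) # flag them in all rows
 scan.flag(mask=msk, unflag=True) # and revert
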
1037 """
1038 varlist = vars()
1039 mask = mask or []
1040 self._flag(row, mask, unflag)
1041 self._add_history("flag", varlist)
1042
1043 @asaplog_post_dec
1044 def flag_row(self, rows=[], unflag=False):
1045 """\
1046 Flag the selected data in row-based manner.
1047
1048 Parameters:
1049
1050 rows: list of row numbers to be flagged. Default is no row
1051 (must be explicitly specified to execute row-based flagging).
1052
1053 unflag: if True, unflag the data.
1054
1055 """
1056 varlist = vars()
1057 self._flag_row(rows, unflag)
1058 self._add_history("flag_row", varlist)
1059
1060 @asaplog_post_dec
1061 def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1062 """\
1063 Flag the selected data outside a specified range (channel-based).
1064
1065 Parameters:
1066
1067 uthres: upper threshold.
1068
1069 dthres: lower threshold
1070
1071 clipoutside: True for flagging data outside the range [dthres:uthres].
1072 False for flagging data inside the range.
1073
1074 unflag: if True, unflag the data.
1075
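A usage sketch (the thresholds are illustrative)::

 # flag everything outside the range [-0.5, 3.0]
 scan.clip(uthres=3.0, dthres=-0.5, clipoutside=True)
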
1076 """
1077 varlist = vars()
1078 self._clip(uthres, dthres, clipoutside, unflag)
1079 self._add_history("clip", varlist)
1080
1081 @asaplog_post_dec
1082 def lag_flag(self, start, end, unit="MHz", insitu=None):
1083 """\
1084 Flag the data in 'lag' space by providing a frequency to remove.
1085 Flagged data in the scantable get interpolated over the region.
1086 No taper is applied.
1087
1088 Parameters:
1089
1090 start: the start frequency (really a period within the
1091 bandwidth) or period to remove
1092
1093 end: the end frequency or period to remove
1094
1095 unit: the frequency unit (default "MHz") or "" for
1096 explicit lag channels
1097
1098 *Notes*:
1099
1100 It is recommended to flag edges of the band or strong
1101 signals beforehand.
1102
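Example (the frequencies are illustrative)::

 # remove a standing-wave component with a 2.5-3.0 MHz period
 sc = scan.lag_flag(2.5, 3.0, unit='MHz', insitu=False)
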
1103 """
1104 if insitu is None: insitu = rcParams['insitu']
1105 self._math._setinsitu(insitu)
1106 varlist = vars()
1107 base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1108 if not (unit == "" or base.has_key(unit)):
1109 raise ValueError("%s is not a valid unit." % unit)
1110 if unit == "":
1111 s = scantable(self._math._lag_flag(self, start, end, "lags"))
1112 else:
1113 s = scantable(self._math._lag_flag(self, start*base[unit],
1114 end*base[unit], "frequency"))
1115 s._add_history("lag_flag", varlist)
1116 if insitu:
1117 self._assign(s)
1118 else:
1119 return s
1120
1121 @asaplog_post_dec
1122 def fft(self, getrealimag=False, rowno=[]):
1123 """\
1124 Apply FFT to the spectra.
1125 Flagged data in the scantable get interpolated over the region.
1126
1127 Parameters:
1128
1129 getrealimag: If True, returns the real and imaginary part
1130 values of the complex results.
1131 If False (the default), returns the amplitude
1132 (absolute value) normalised with Ndata/2 and
1133 phase (argument, in unit of radian).
1134
1135 rowno: The row number(s) to be processed. int, list
1136 and tuple are accepted. By default ([]), FFT
1137 is applied to the whole data.
1138
1139 Returns:
1140
1141 A dictionary containing two keys, i.e., 'real' and 'imag' for
1142 getrealimag = True, or 'ampl' and 'phase' for getrealimag = False,
1143 respectively.
1144 The value for each key is a list of lists containing the FFT
1145 results from each spectrum.
1146
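A usage sketch (row 0 is used for illustration)::

 res = scan.fft(rowno=[0])
 ampl0 = res['ampl'][0] # amplitude spectrum of row 0
 res = scan.fft(getrealimag=True, rowno=[0])
 re0, im0 = res['real'][0], res['imag'][0]
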
1147 """
1148 if getrealimag:
1149 res = {"real":[], "imag":[]} # return real and imaginary values
1150 else:
1151 res = {"ampl":[], "phase":[]} # return amplitude and phase(argument)
1152
1153 if isinstance(rowno, int):
1154 rowno = [rowno]
1155 elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1156 raise TypeError("The row number(s) should be int, list or tuple.")
1157
1158 nrow = len(rowno)
1159 if nrow == 0: nrow = self.nrow() # by default, calculate for all rows.
1160
1161 fspec = self._math._fft(self, rowno, getrealimag)
1162 nspec = len(fspec)/nrow
1163
1164 i = 0
1165 while (i < nrow):
1166 v1 = []
1167 v2 = []
1168
1169 j = 0
1170 while (j < nspec):
1171 k = i*nspec + j
1172 v1.append(fspec[k])
1173 v2.append(fspec[k+1])
1174 j += 2
1175
1176 if getrealimag:
1177 res["real"].append(v1)
1178 res["imag"].append(v2)
1179 else:
1180 res["ampl"].append(v1)
1181 res["phase"].append(v2)
1182
1183 i += 1
1184
1185 return res
1186
1187 @asaplog_post_dec
1188 def create_mask(self, *args, **kwargs):
1189 """\
1190 Compute and return a mask based on [min, max] windows.
1191 The specified windows are to be INCLUDED, when the mask is
1192 applied.
1193
1194 Parameters:
1195
1196 [min, max], [min2, max2], ...
1197 Pairs of start/end points (inclusive) specifying the regions
1198 to be masked
1199
1200 invert: optional argument. If specified as True,
1201 return an inverted mask, i.e. the regions
1202 specified are EXCLUDED
1203
1204 row: create the mask using the specified row for
1205 unit conversions, default is row=0
1206 only necessary if frequency varies over rows.
1207
1208 Examples::
1209
1210 scan.set_unit('channel')
1211 # a)
1212 msk = scan.create_mask([400, 500], [800, 900])
1213 # masks everything outside 400 and 500
1214 # and 800 and 900 in the unit 'channel'
1215
1216 # b)
1217 msk = scan.create_mask([400, 500], [800, 900], invert=True)
1218 # masks the regions between 400 and 500
1219 # and 800 and 900 in the unit 'channel'
1220
1221 # c)
1222 #mask only channel 400
1223 msk = scan.create_mask([400])
1224
1225 """
1226 row = kwargs.get("row", 0)
1227 data = self._getabcissa(row)
1228 u = self._getcoordinfo()[0]
1229 if u == "":
1230 u = "channel"
1231 msg = "The current mask window unit is %s" % u
1232 i = self._check_ifs()
1233 if not i:
1234 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1235 asaplog.push(msg)
1236 n = self.nchan()
1237 msk = _n_bools(n, False)
1238 # test if args is a 'list' or a 'normal *args - UGLY!!!
1239
1240 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1241 and args or args[0]
1242 for window in ws:
1243 if len(window) == 1:
1244 window = [window[0], window[0]]
1245 if len(window) == 0 or len(window) > 2:
1246 raise ValueError("A window needs to be defined as [start(, end)]")
1247 if window[0] > window[1]:
1248 tmp = window[0]
1249 window[0] = window[1]
1250 window[1] = tmp
1251 for i in range(n):
1252 if data[i] >= window[0] and data[i] <= window[1]:
1253 msk[i] = True
1254 if kwargs.has_key('invert'):
1255 if kwargs.get('invert'):
1256 msk = mask_not(msk)
1257 return msk
1258
1259 def get_masklist(self, mask=None, row=0, silent=False):
1260 """\
1261 Compute and return a list of mask windows, [min, max].
1262
1263 Parameters:
1264
1265 mask: channel mask, created with create_mask.
1266
1267 row: calculate the masklist using the specified row
1268 for unit conversions, default is row=0
1269 only necessary if frequency varies over rows.
1270
1271 Returns:
1272
1273 [min, max], [min2, max2], ...
1274 Pairs of start/end points (inclusive) specifying
1275 the masked regions
1276
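A usage sketch (assumes the unit is 'channel'; the windows are illustrative)::

 msk = scan.create_mask([100, 200], [500, 600])
 print scan.get_masklist(msk) # e.g. [[100.0, 200.0], [500.0, 600.0]]
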
1277 """
1278 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1279 raise TypeError("The mask should be list or tuple.")
1280 if len(mask) < 2:
1281 raise TypeError("The mask elements should be > 1")
1282 if self.nchan() != len(mask):
1283 msg = "Number of channels in scantable != number of mask elements"
1284 raise TypeError(msg)
1285 data = self._getabcissa(row)
1286 u = self._getcoordinfo()[0]
1287 if u == "":
1288 u = "channel"
1289 msg = "The current mask window unit is %s" % u
1290 i = self._check_ifs()
1291 if not i:
1292 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1293 if not silent:
1294 asaplog.push(msg)
1295 masklist=[]
1296 ist, ien = None, None
1297 ist, ien=self.get_mask_indices(mask)
1298 if ist is not None and ien is not None:
1299 for i in xrange(len(ist)):
1300 range=[data[ist[i]],data[ien[i]]]
1301 range.sort()
1302 masklist.append([range[0],range[1]])
1303 return masklist
1304
1305 def get_mask_indices(self, mask=None):
1306 """\
1307 Compute and Return lists of mask start indices and mask end indices.
1308
1309 Parameters:
1310
1311 mask: channel mask, created with create_mask.
1312
1313 Returns:
1314
1315 List of mask start indices and that of mask end indices,
1316 i.e., [istart1,istart2,....], [iend1,iend2,....].
1317
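A usage sketch (assumes the unit is 'channel'; the windows are illustrative)::

 msk = scan.create_mask([100, 200], [500, 600])
 istart, iend = scan.get_mask_indices(msk)
 # istart e.g. [100, 500], iend e.g. [200, 600]
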
1318 """
1319 if not (isinstance(mask,list) or isinstance(mask, tuple)):
1320 raise TypeError("The mask should be list or tuple.")
1321 if len(mask) < 2:
1322 raise TypeError("The mask elements should be > 1")
1323 istart=[]
1324 iend=[]
1325 if mask[0]: istart.append(0)
1326 for i in range(len(mask)-1):
1327 if not mask[i] and mask[i+1]:
1328 istart.append(i+1)
1329 elif mask[i] and not mask[i+1]:
1330 iend.append(i)
1331 if mask[len(mask)-1]: iend.append(len(mask)-1)
1332 if len(istart) != len(iend):
1333 raise RuntimeError("Numbers of mask start != mask end.")
1334 for i in range(len(istart)):
1335 if istart[i] > iend[i]:
1336 raise RuntimeError("Mask start index > mask end index")
1337 break
1338 return istart,iend
1339
1340 @asaplog_post_dec
1341 def parse_maskexpr(self,maskstring):
1342 """
1343 Parse CASA type mask selection syntax (IF dependent).
1344
1345 Parameters:
1346 maskstring : A string mask selection expression.
1347 A comma separated selections mean different IF -
1348 channel combinations. IFs and channel selections
1349 are partitioned by a colon, ':'.
1350 examples:
1351 '' = all IFs (all channels)
1352 '<2,4~6,9' = IFs 0,1,4,5,6,9 (all channels)
1353 '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1354 '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1355 all channels in IF8
1356 Returns:
1357 A dictionary of selected (valid) IF and masklist pairs,
1358 e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
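
A usage sketch (IF and channel numbers are illustrative)::

 # channels 100 to 500 in IFs 0 and 1, plus all channels in IF 3
 seldict = scan.parse_maskexpr('0~1:100~500,3')
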
1359 """
1360 if not isinstance(maskstring,str):
1361 asaplog.post()
1362 asaplog.push("Invalid mask expression")
1363 asaplog.post("ERROR")
1364
1365 valid_ifs = self.getifnos()
1366 frequnit = self.get_unit()
1367 seldict = {}
1368 if maskstring == "":
1369 maskstring = str(valid_ifs)[1:-1]
1370 ## split each selection
1371 sellist = maskstring.split(',')
1372 for currselstr in sellist:
1373 selset = currselstr.split(':')
1374 # spw and mask string (may include ~, < or >)
1375 spwmasklist = self._parse_selection(selset[0],typestr='integer',
1376 offset=1,minval=min(valid_ifs),
1377 maxval=max(valid_ifs))
1378 for spwlist in spwmasklist:
1379 selspws = []
1380 for ispw in range(spwlist[0],spwlist[1]+1):
1381 # Put into the list only if ispw exists
1382 if valid_ifs.count(ispw):
1383 selspws.append(ispw)
1384 del spwmasklist, spwlist
1385
1386 # parse frequency mask list
1387 if len(selset) > 1:
1388 freqmasklist = self._parse_selection(selset[1],typestr='float',
1389 offset=0.)
1390 else:
1391 # want to select the whole spectrum
1392 freqmasklist = [None]
1393
1394 ## define a dictionary of spw - masklist combination
1395 for ispw in selspws:
1396 #print "working on", ispw
1397 spwstr = str(ispw)
1398 if len(selspws) == 0:
1399 # empty spw
1400 continue
1401 else:
1402 ## want to get min and max of the spw and
1403 ## offset to set for '<' and '>'
1404 if frequnit == 'channel':
1405 minfreq = 0
1406 maxfreq = self.nchan(ifno=ispw)
1407 offset = 0.5
1408 else:
1409 ## This is the ugly part. Needs improvement.
1410 for ifrow in xrange(self.nrow()):
1411 if self.getif(ifrow) == ispw:
1412 #print "IF",ispw,"found in row =",ifrow
1413 break
1414 freqcoord = self.get_coordinate(ifrow)
1415 freqs = self._getabcissa(ifrow)
1416 minfreq = min(freqs)
1417 maxfreq = max(freqs)
1418 if len(freqs) == 1:
1419 offset = 0.5
1420 elif frequnit.find('Hz') > 0:
1421 offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1422 -freqcoord.to_frequency(0,unit=frequnit))*0.5
1423 elif frequnit.find('m/s') > 0:
1424 offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1425 -freqcoord.to_velocity(0,unit=frequnit))*0.5
1426 else:
1427 asaplog.post()
1428 asaplog.push("Invalid frequency unit")
1429 asaplog.post("ERROR")
1430 del freqs, freqcoord, ifrow
1431 for freq in freqmasklist:
1432 selmask = freq or [minfreq, maxfreq]
1433 if selmask[0] == None:
1434 ## selection was "<freq[1]".
1435 if selmask[1] < minfreq:
1436 ## avoid adding region selection
1437 selmask = None
1438 else:
1439 selmask = [minfreq,selmask[1]-offset]
1440 elif selmask[1] == None:
1441 ## selection was ">freq[0]"
1442 if selmask[0] > maxfreq:
1443 ## avoid adding region selection
1444 selmask = None
1445 else:
1446 selmask = [selmask[0]+offset,maxfreq]
1447 if selmask:
1448 if not seldict.has_key(spwstr):
1449 # new spw selection
1450 seldict[spwstr] = []
1451 seldict[spwstr] += [selmask]
1452 del minfreq,maxfreq,offset,freq,selmask
1453 del spwstr
1454 del freqmasklist
1455 del valid_ifs
1456 if len(seldict) == 0:
1457 asaplog.post()
1458 asaplog.push("No valid selection in the mask expression: "+maskstring)
1459 asaplog.post("WARN")
1460 return None
1461 msg = "Selected masklist:\n"
1462 for sif, lmask in seldict.iteritems():
1463 msg += " IF"+sif+" - "+str(lmask)+"\n"
1464 asaplog.push(msg)
1465 return seldict
1466
1467 def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1468 """
1469 Parameters:
1470 selstr : The Selection string, e.g., '<3;5~7;100~103;9'
1471 typestr : The type of the values in returned list
1472 ('integer' or 'float')
1473 offset : The offset value to subtract from or add to
1474 the boundary value if the selection string
1475 includes '<' or '>'
1476 minval, maxval : The minimum/maximum values to set if the
1477 selection string includes '<' or '>'.
1478 The list element is filled with None by default.
1479 Returns:
1480 A list of min/max pair of selections.
1481 Example:
1482 _parseSelection('<3;5~7;9',typestr='int',offset=1,minval=0)
1483 returns [[0,2],[5,7],[9,9]]
1484 """
1485 selgroups = selstr.split(';')
1486 sellists = []
1487 if typestr.lower().startswith('int'):
1488 formatfunc = int
1489 else:
1490 formatfunc = float
1491
1492 for currsel in selgroups:
1493 if currsel.find('~') > 0:
1494 minsel = formatfunc(currsel.split('~')[0].strip())
1495 maxsel = formatfunc(currsel.split('~')[1].strip())
1496 elif currsel.strip().startswith('<'):
1497 minsel = minval
1498 maxsel = formatfunc(currsel.split('<')[1].strip()) \
1499 - formatfunc(offset)
1500 elif currsel.strip().startswith('>'):
1501 minsel = formatfunc(currsel.split('>')[1].strip()) \
1502 + formatfunc(offset)
1503 maxsel = maxval
1504 else:
1505 minsel = formatfunc(currsel)
1506 maxsel = formatfunc(currsel)
1507 sellists.append([minsel,maxsel])
1508 return sellists
1509
1510# def get_restfreqs(self):
1511# """
1512# Get the restfrequency(s) stored in this scantable.
1513# The return value(s) are always of unit 'Hz'
1514# Parameters:
1515# none
1516# Returns:
1517# a list of doubles
1518# """
1519# return list(self._getrestfreqs())
1520
1521 def get_restfreqs(self, ids=None):
1522 """\
1523 Get the restfrequency(s) stored in this scantable.
1524 The return value(s) are always of unit 'Hz'
1525
1526 Parameters:
1527
1528 ids: (optional) a list of MOLECULE_ID values for which the
1529 restfrequency(s) are to be retrieved
1530
1531 Returns:
1532
1533 dictionary containing ids and a list of doubles for each id
1534
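A usage sketch (the returned values depend on the data)::

 rfall = scan.get_restfreqs() # dict keyed by MOLECULE_ID
 rf0 = scan.get_restfreqs(0) # list for MOLECULE_ID 0 only
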
1535 """
1536 if ids is None:
1537 rfreqs={}
1538 idlist = self.getmolnos()
1539 for i in idlist:
1540 rfreqs[i]=list(self._getrestfreqs(i))
1541 return rfreqs
1542 else:
1543 if type(ids)==list or type(ids)==tuple:
1544 rfreqs={}
1545 for i in ids:
1546 rfreqs[i]=list(self._getrestfreqs(i))
1547 return rfreqs
1548 else:
1549 return list(self._getrestfreqs(ids))
1550 #return list(self._getrestfreqs(ids))
1551
1552 def set_restfreqs(self, freqs=None, unit='Hz'):
1553 """\
1554 Set or replace the restfrequency. If the 'freqs' argument
1555 holds a scalar, then that rest frequency will be applied to
1556 all the selected data. If the 'freqs' argument holds
1558 a vector, then it MUST be of equal or smaller length than
1559 the number of IFs (and the available restfrequencies will be
1560 replaced by this vector). In this case, *all* data have
1561 the restfrequency set per IF according
1562 to the corresponding value you give in the 'freqs' vector.
1563 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1564 IF 1 gets restfreq 2e9.
1565
1566 You can also specify the frequencies via a linecatalog.
1567
1568 Parameters:
1569
1570 freqs: list of rest frequency values or string identifiers
1571
1572 unit: unit for rest frequency (default 'Hz')
1573
1574
1575 Example::
1576
1577 # set the given restfrequency for the all currently selected IFs
1578 scan.set_restfreqs(freqs=1.4e9)
1579 # set restfrequencies for the n IFs (n > 1) in the order of the
1580 # list, i.e
1581 # IF0 -> 1.4e9, IF1 -> 1.41e9, IF3 -> 1.42e9
1582 # len(list_of_restfreqs) == nIF
1583 # for nIF == 1 the following will set multiple restfrequency for
1584 # that IF
1585 scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1586 # set multiple restfrequencies per IF. as a list of lists where
1587 # the outer list has nIF elements, the inner ones are arbitrary
1588 scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1589
1590 *Note*:
1591
1592 To do more sophisticated restfrequency setting, e.g. on a
1593 source and IF basis, use scantable.set_selection() before using
1594 this function::
1595
1596 # provided your scantable is called scan
1597 selection = selector()
1598 selection.set_name("ORION*")
1599 selection.set_ifs([1])
1600 scan.set_selection(selection)
1601 scan.set_restfreqs(freqs=86.6e9)
1602
1603 """
1604 varlist = vars()
1605 from asap import linecatalog
1606 # simple value
1607 if isinstance(freqs, int) or isinstance(freqs, float):
1608 self._setrestfreqs([freqs], [""], unit)
1609 # list of values
1610 elif isinstance(freqs, list) or isinstance(freqs, tuple):
1611 # list values are scalars
1612 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1613 if len(freqs) == 1:
1614 self._setrestfreqs(freqs, [""], unit)
1615 else:
1616 # allow the 'old' mode of setting multiple IFs
1617 sel = selector()
1618 savesel = self._getselection()
1619 iflist = self.getifnos()
1620 if len(freqs)>len(iflist):
1621 raise ValueError("number of elements in list of list "
1622 "exeeds the current IF selections")
1623 iflist = self.getifnos()
1624 for i, fval in enumerate(freqs):
1625 sel.set_ifs(iflist[i])
1626 self._setselection(sel)
1627 self._setrestfreqs([fval], [""], unit)
1628 self._setselection(savesel)
1629
1630 # list values are dict, {'value'=, 'name'=)
1631 elif isinstance(freqs[-1], dict):
1632 values = []
1633 names = []
1634 for d in freqs:
1635 values.append(d["value"])
1636 names.append(d["name"])
1637 self._setrestfreqs(values, names, unit)
1638 elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1639 sel = selector()
1640 savesel = self._getselection()
1641 iflist = self.getifnos()
1642 if len(freqs)>len(iflist):
1643 raise ValueError("number of elements in list of list exceeds"
1644 " the current IF selections")
1645 for i, fval in enumerate(freqs):
1646 sel.set_ifs(iflist[i])
1647 self._setselection(sel)
1648 self._setrestfreqs(fval, [""], unit)
1649 self._setselection(savesel)
1650 # freqs are to be taken from a linecatalog
1651 elif isinstance(freqs, linecatalog):
1652 sel = selector()
1653 savesel = self._getselection()
iflist = self.getifnos() # IF numbers corresponding to the current selection
1654 for i in xrange(freqs.nrow()):
1655 sel.set_ifs(iflist[i])
1656 self._setselection(sel)
1657 self._setrestfreqs([freqs.get_frequency(i)],
1658 [freqs.get_name(i)], "MHz")
1659 # ensure that we are not iterating past nIF
1660 if i == self.nif()-1: break
1661 self._setselection(savesel)
1662 else:
1663 return
1664 self._add_history("set_restfreqs", varlist)
1665
1666 def shift_refpix(self, delta):
1667 """\
1668 Shift the reference pixel of the Spectra Coordinate by an
1669 integer amount.
1670
1671 Parameters:
1672
1673 delta: the amount to shift by
1674
1675 *Note*:
1676
1677 Be careful using this with broadband data.
1678
1679 """
1680 Scantable.shift_refpix(self, delta)
1681
1682 @asaplog_post_dec
1683 def history(self, filename=None):
1684 """\
1685 Print the history. Optionally to a file.
1686
1687 Parameters:
1688
1689 filename: The name of the file to save the history to.
1690
1691 """
1692 hist = list(self._gethistory())
1693 out = "-"*80
1694 for h in hist:
1695 if h.startswith("---"):
1696 out = "\n".join([out, h])
1697 else:
1698 items = h.split("##")
1699 date = items[0]
1700 func = items[1]
1701 items = items[2:]
1702 out += "\n"+date+"\n"
1703 out += "Function: %s\n Parameters:" % (func)
1704 for i in items:
1705 if i == '':
1706 continue
1707 s = i.split("=")
1708 out += "\n %s = %s" % (s[0], s[1])
1709 out = "\n".join([out, "-"*80])
1710 if filename is not None:
1711 if filename == "":
1712 filename = 'scantable_history.txt'
1713 import os
1714 filename = os.path.expandvars(os.path.expanduser(filename))
1715 if not os.path.isdir(filename):
1716 data = open(filename, 'w')
1717 data.write(out)
1718 data.close()
1719 else:
1720 msg = "Illegal file name '%s'." % (filename)
1721 raise IOError(msg)
1722 return page(out)
1723 #
1724 # Maths business
1725 #
1726 @asaplog_post_dec
1727 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1728 """\
1729 Return the (time) weighted average of a scan.
1730
1731 *Note*:
1732
1733 in channels only - align if necessary
1734
1735 Parameters:
1736
1737 mask: an optional mask (only used for 'var' and 'tsys'
1738 weighting)
1739
1740 scanav: True averages each scan separately
1741 False (default) averages all scans together,
1742
1743 weight: Weighting scheme.
1744 'none' (mean no weight)
1745 'var' (1/var(spec) weighted)
1746 'tsys' (1/Tsys**2 weighted)
1747 'tint' (integration time weighted)
1748 'tintsys' (Tint/Tsys**2)
1749 'median' (median averaging)
1750 The default is 'tint'
1751
1752 align: align the spectra in velocity before averaging. It takes
1753 the time of the first spectrum as reference time.
1754
1755 Example::
1756
1757 # time average the scantable without using a mask
1758 newscan = scan.average_time()
1759
1760 """
1761 varlist = vars()
1762 weight = weight or 'TINT'
1763 mask = mask or ()
1764 scanav = (scanav and 'SCAN') or 'NONE'
1765 scan = (self, )
1766
1767 if align:
1768 scan = (self.freq_align(insitu=False), )
1769 s = None
1770 if weight.upper() == 'MEDIAN':
1771 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1772 scanav))
1773 else:
1774 s = scantable(self._math._average(scan, mask, weight.upper(),
1775 scanav))
1776 s._add_history("average_time", varlist)
1777 return s
1778
1779 @asaplog_post_dec
1780 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1781 """\
1782 Return a scan where all spectra are converted to either
1783 Jansky or Kelvin depending upon the flux units of the scan table.
1784 By default the function tries to look the values up internally.
1785 If it can't find them (or if you want to override), you must
1786 specify EITHER jyperk OR eta (and D which it will try to look up
1787 also if you don't set it). jyperk takes precedence if you set both.
1788
1789 Parameters:
1790
1791 jyperk: the Jy / K conversion factor
1792
1793 eta: the aperture efficiency
1794
1795 d: the geometric diameter (metres)
1796
1797 insitu: if False a new scantable is returned.
1798 Otherwise, the scaling is done in-situ
1799 The default is taken from .asaprc (False)
1800
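A usage sketch (the factors are illustrative, not telescope values)::

 # apply an explicit Jy/K factor, returning a new scantable
 sjy = scan.convert_flux(jyperk=12.5, insitu=False)
 # or derive the factor from aperture efficiency and diameter
 sjy = scan.convert_flux(eta=0.58, d=22.0, insitu=False)
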
1801 """
1802 if insitu is None: insitu = rcParams['insitu']
1803 self._math._setinsitu(insitu)
1804 varlist = vars()
1805 jyperk = jyperk or -1.0
1806 d = d or -1.0
1807 eta = eta or -1.0
1808 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1809 s._add_history("convert_flux", varlist)
1810 if insitu: self._assign(s)
1811 else: return s
1812
1813 @asaplog_post_dec
1814 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1815 """\
1816 Return a scan after applying a gain-elevation correction.
1817 The correction can be made via either a polynomial or a
1818 table-based interpolation (and extrapolation if necessary).
1819 You specify polynomial coefficients, an ascii table or neither.
1820 If you specify neither, then a polynomial correction will be made
1821 with built in coefficients known for certain telescopes (an error
1822 will occur if the instrument is not known).
1823 The data and Tsys are *divided* by the scaling factors.
1824
1825 Parameters:
1826
1827 poly: Polynomial coefficients (default None) to compute a
1828 gain-elevation correction as a function of
1829 elevation (in degrees).
1830
1831 filename: The name of an ascii file holding correction factors.
1832 The first row of the ascii file must give the column
1833 names and these MUST include columns
1834 "ELEVATION" (degrees) and "FACTOR" (multiply data
1835 by this) somewhere.
1836 The second row must give the data type of the
1837 column. Use 'R' for Real and 'I' for Integer.
1838 An example file would be
1839 (actual factors are arbitrary) :
1840
1841 TIME ELEVATION FACTOR
1842 R R R
1843 0.1 0 0.8
1844 0.2 20 0.85
1845 0.3 40 0.9
1846 0.4 60 0.85
1847 0.5 80 0.8
1848 0.6 90 0.75
1849
1850 method: Interpolation method when correcting from a table.
1851 Values are "nearest", "linear" (default), "cubic"
1852 and "spline"
1853
1854 insitu: if False a new scantable is returned.
1855 Otherwise, the scaling is done in-situ
1856 The default is taken from .asaprc (False)
1857
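A usage sketch (the coefficients and file name are illustrative)::

 # polynomial correction as a function of elevation (degrees)
 s = scan.gain_el(poly=[0.7, 0.008, -0.00005], insitu=False)
 # or interpolate correction factors from an ascii table
 s = scan.gain_el(filename='gainel.txt', method='linear', insitu=False)
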
1858 """
1859
1860 if insitu is None: insitu = rcParams['insitu']
1861 self._math._setinsitu(insitu)
1862 varlist = vars()
1863 poly = poly or ()
1864 from os.path import expandvars
1865 filename = expandvars(filename)
1866 s = scantable(self._math._gainel(self, poly, filename, method))
1867 s._add_history("gain_el", varlist)
1868 if insitu:
1869 self._assign(s)
1870 else:
1871 return s
1872
1873 @asaplog_post_dec
1874 def freq_align(self, reftime=None, method='cubic', insitu=None):
1875 """\
1876 Return a scan where all rows have been aligned in frequency/velocity.
1877 The alignment frequency frame (e.g. LSRK) is that set by function
1878 set_freqframe.
1879
1880 Parameters:
1881
1882 reftime: reference time to align at. By default, the time of
1883 the first row of data is used.
1884
1885 method: Interpolation method for regridding the spectra.
1886 Choose from "nearest", "linear", "cubic" (default)
1887 and "spline"
1888
1889 insitu: if False a new scantable is returned.
1890 Otherwise, the scaling is done in-situ
1891 The default is taken from .asaprc (False)
1892
1893 """
1894 if insitu is None: insitu = rcParams["insitu"]
1895 self._math._setinsitu(insitu)
1896 varlist = vars()
1897 reftime = reftime or ""
1898 s = scantable(self._math._freq_align(self, reftime, method))
1899 s._add_history("freq_align", varlist)
1900 if insitu: self._assign(s)
1901 else: return s
1902
1903 @asaplog_post_dec
1904 def opacity(self, tau=None, insitu=None):
1905 """\
1906 Apply an opacity correction. The data
1907 and Tsys are multiplied by the correction factor.
1908
1909 Parameters:
1910
1911 tau: (list of) opacity from which the correction factor is
1912 exp(tau*ZD)
1913 where ZD is the zenith-distance.
1914 If a list is provided, it has to be of length nIF,
1915 nIF*nPol or 1 and in order of IF/POL, e.g.
1916 [opif0pol0, opif0pol1, opif1pol0 ...]
1917 if tau is `None` the opacities are determined from a
1918 model.
1919
1920 insitu: if False a new scantable is returned.
1921 Otherwise, the scaling is done in-situ
1922 The default is taken from .asaprc (False)
1923
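A usage sketch (the opacity values are illustrative)::

 # a single zenith opacity applied to all IFs/polarisations
 scor = scan.opacity(tau=0.083, insitu=False)
 # per-IF opacities, given in IF order
 scor = scan.opacity(tau=[0.083, 0.11], insitu=False)
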
1924 """
1925 if insitu is None: insitu = rcParams['insitu']
1926 self._math._setinsitu(insitu)
1927 varlist = vars()
1928 if not hasattr(tau, "__len__"):
1929 tau = [tau]
1930 s = scantable(self._math._opacity(self, tau))
1931 s._add_history("opacity", varlist)
1932 if insitu: self._assign(s)
1933 else: return s
1934
1935 @asaplog_post_dec
1936 def bin(self, width=5, insitu=None):
1937 """\
1938 Return a scan where all spectra have been binned up.
1939
1940 Parameters:
1941
1942 width: The bin width (default=5) in pixels
1943
1944 insitu: if False a new scantable is returned.
1945 Otherwise, the scaling is done in-situ
1946 The default is taken from .asaprc (False)
1947
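        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable)::

            # bin up all spectra by 10 channels; return a new scantable
            binned = scan.bin(width=10, insitu=False)
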
1948 """
1949 if insitu is None: insitu = rcParams['insitu']
1950 self._math._setinsitu(insitu)
1951 varlist = vars()
1952 s = scantable(self._math._bin(self, width))
1953 s._add_history("bin", varlist)
1954 if insitu:
1955 self._assign(s)
1956 else:
1957 return s
1958
1959 @asaplog_post_dec
1960 def resample(self, width=5, method='cubic', insitu=None):
1961 """\
1962         Return a scan where all spectra have been resampled (binned up using interpolation).
1963
1964 Parameters:
1965
1966 width: The bin width (default=5) in pixels
1967
1968             method:       Interpolation method used for the resampling.
1969 Values are "nearest", "linear", "cubic" (default)
1970 and "spline"
1971
1972 insitu: if False a new scantable is returned.
1973 Otherwise, the scaling is done in-situ
1974 The default is taken from .asaprc (False)
1975
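        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable; a non-integral width is assumed to be accepted)::

            # regrid all spectra onto channels 2.5 times wider,
            # using the default cubic interpolation
            resampled = scan.resample(width=2.5, insitu=False)
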
1976 """
1977 if insitu is None: insitu = rcParams['insitu']
1978 self._math._setinsitu(insitu)
1979 varlist = vars()
1980 s = scantable(self._math._resample(self, method, width))
1981 s._add_history("resample", varlist)
1982 if insitu: self._assign(s)
1983 else: return s
1984
1985 @asaplog_post_dec
1986 def average_pol(self, mask=None, weight='none'):
1987 """\
1988 Average the Polarisations together.
1989
1990 Parameters:
1991
1992             mask:        An optional mask defining the region where the
1993 averaging will be applied. The output will have all
1994 specified points masked.
1995
1996 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1997 weighted), or 'tsys' (1/Tsys**2 weighted)
1998
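        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable)::

            # average the polarisations together with Tsys weighting
            polavg = scan.average_pol(weight='tsys')
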
1999 """
2000 varlist = vars()
2001 mask = mask or ()
2002 s = scantable(self._math._averagepol(self, mask, weight.upper()))
2003 s._add_history("average_pol", varlist)
2004 return s
2005
2006 @asaplog_post_dec
2007 def average_beam(self, mask=None, weight='none'):
2008 """\
2009 Average the Beams together.
2010
2011 Parameters:
2012             mask:        An optional mask defining the region where the
2013 averaging will be applied. The output will have all
2014 specified points masked.
2015
2016 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
2017 weighted), or 'tsys' (1/Tsys**2 weighted)
2018
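        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing multi-beam scantable)::

            # average all beams together without weighting
            beamavg = scan.average_beam()
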
2019 """
2020 varlist = vars()
2021 mask = mask or ()
2022 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2023 s._add_history("average_beam", varlist)
2024 return s
2025
2026 def parallactify(self, pflag):
2027 """\
2028 Set a flag to indicate whether this data should be treated as having
2029 been 'parallactified' (total phase == 0.0)
2030
2031 Parameters:
2032
2033 pflag: Bool indicating whether to turn this on (True) or
2034 off (False)
2035
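        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable)::

            # mark the data as already parallactified
            scan.parallactify(True)
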
2036 """
2037 varlist = vars()
2038 self._parallactify(pflag)
2039 self._add_history("parallactify", varlist)
2040
2041 @asaplog_post_dec
2042 def convert_pol(self, poltype=None):
2043 """\
2044 Convert the data to a different polarisation type.
2045 Note that you will need cross-polarisation terms for most conversions.
2046
2047 Parameters:
2048
2049 poltype: The new polarisation type. Valid types are:
2050 "linear", "circular", "stokes" and "linpol"
2051
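        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable containing linears plus cross-polarisation terms)::

            # convert to Stokes parameters; a new scantable is returned
            stokes = scan.convert_pol("stokes")
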
2052 """
2053 varlist = vars()
2054 s = scantable(self._math._convertpol(self, poltype))
2055 s._add_history("convert_pol", varlist)
2056 return s
2057
2058 @asaplog_post_dec
2059 def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
2060 """\
2061 Smooth the spectrum by the specified kernel (conserving flux).
2062
2063 Parameters:
2064
2065 kernel: The type of smoothing kernel. Select from
2066 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
2067 or 'poly'
2068
2069 width: The width of the kernel in pixels. For hanning this is
2070                          ignored, otherwise it defaults to 5 pixels.
2071 For 'gaussian' it is the Full Width Half
2072 Maximum. For 'boxcar' it is the full width.
2073 For 'rmedian' and 'poly' it is the half width.
2074
2075 order: Optional parameter for 'poly' kernel (default is 2), to
2076                          specify the order of the polynomial. Ignored by all other
2077 kernels.
2078
2079 plot: plot the original and the smoothed spectra.
2080                          In this mode each individual fit has to be approved by
2081 typing 'y' or 'n'
2082
2083 insitu: if False a new scantable is returned.
2084 Otherwise, the scaling is done in-situ
2085 The default is taken from .asaprc (False)
2086
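        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable)::

            # smooth with a gaussian of FWHM 10 channels; return a new scantable
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
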
2087 """
2088 if insitu is None: insitu = rcParams['insitu']
2089 self._math._setinsitu(insitu)
2090 varlist = vars()
2091
2092 if plot: orgscan = self.copy()
2093
2094 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
2095 s._add_history("smooth", varlist)
2096
2097 if plot:
2098 from asap.asapplotter import new_asaplot
2099 theplot = new_asaplot(rcParams['plotter.gui'])
2100 theplot.set_panels()
2101 ylab=s._get_ordinate_label()
2102 #theplot.palette(0,["#777777","red"])
2103 for r in xrange(s.nrow()):
2104 xsm=s._getabcissa(r)
2105 ysm=s._getspectrum(r)
2106 xorg=orgscan._getabcissa(r)
2107 yorg=orgscan._getspectrum(r)
2108 theplot.clear()
2109 theplot.hold()
2110 theplot.set_axes('ylabel',ylab)
2111 theplot.set_axes('xlabel',s._getabcissalabel(r))
2112 theplot.set_axes('title',s._getsourcename(r))
2113 theplot.set_line(label='Original',color="#777777")
2114 theplot.plot(xorg,yorg)
2115 theplot.set_line(label='Smoothed',color="red")
2116 theplot.plot(xsm,ysm)
2117 ### Ugly part for legend
2118 for i in [0,1]:
2119 theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
2120 theplot.release()
2121 ### Ugly part for legend
2122 theplot.subplots[0]['lines']=[]
2123 res = raw_input("Accept smoothing ([y]/n): ")
2124 if res.upper() == 'N':
2125 s._setspectrum(yorg, r)
2126 theplot.quit()
2127 del theplot
2128 del orgscan
2129
2130 if insitu: self._assign(s)
2131 else: return s
2132
2133
2134 @asaplog_post_dec
2135 def sinusoid_baseline(self, insitu=None, mask=None, nwave=None, maxwavelength=None,
2136 clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
2137 """\
2138 Return a scan which has been baselined (all rows) with sinusoidal functions.
2139 Parameters:
2140 insitu: If False a new scantable is returned.
2141 Otherwise, the scaling is done in-situ
2142 The default is taken from .asaprc (False)
2143 mask: An optional mask
2144 nwave: the maximum wave number of sinusoids within
2145 maxwavelength * (spectral range).
2146 The default is 3 (i.e., sinusoids with wave
2147 number of 0(=constant), 1, 2, and 3 are
2148 used for fitting). Also it is possible to
2149 explicitly specify all the wave numbers to
2150 be used, by giving a list including them
2151 (e.g. [0,1,2,15,16]).
2152 maxwavelength: the longest sinusoidal wavelength. The
2153 default is 1.0 (unit: spectral range).
2154 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2155             clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2156 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2157 plot the fit and the residual. In this each
2158                         individual fit has to be approved by typing 'y'
2159 or 'n'
2160 getresidual: if False, returns best-fit values instead of
2161 residual. (default is True)
2162 outlog: Output the coefficients of the best-fit
2163 function to logger (default is False)
2164 blfile: Name of a text file in which the best-fit
2165                         parameter values are to be written
2166 (default is "": no file/logger output)
2167
2168 Example:
2169 # return a scan baselined by a combination of sinusoidal curves having
2170 # wave numbers in spectral window up to 10,
2171 # also with 3-sigma clipping, iteration up to 4 times
2172 bscan = scan.sinusoid_baseline(nwave=10,clipthresh=3.0,clipniter=4)
2173
2174 Note:
2175 The best-fit parameter values output in logger and/or blfile are now
2176 based on specunit of 'channel'.
2177 """
2178
2179 varlist = vars()
2180
2181 if insitu is None: insitu = rcParams["insitu"]
2182 if insitu:
2183 workscan = self
2184 else:
2185 workscan = self.copy()
2186
2187 nchan = workscan.nchan()
2188
2189 if mask is None: mask = [True for i in xrange(nchan)]
2190 if nwave is None: nwave = 3
2191 if maxwavelength is None: maxwavelength = 1.0
2192 if clipthresh is None: clipthresh = 3.0
2193 if clipniter is None: clipniter = 0
2194 if plot is None: plot = False
2195 if getresidual is None: getresidual = True
2196 if outlog is None: outlog = False
2197 if blfile is None: blfile = ""
2198
2199 if isinstance(nwave, int):
2200 in_nwave = nwave
2201 nwave = []
2202 for i in xrange(in_nwave+1): nwave.append(i)
2203
2204 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2205
2206 try:
2207 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2208 workscan._sinusoid_baseline(mask, nwave, maxwavelength, clipthresh, clipniter, getresidual, outlog, blfile)
2209
2210 workscan._add_history("sinusoid_baseline", varlist)
2211
2212 if insitu:
2213 self._assign(workscan)
2214 else:
2215 return workscan
2216
2217 except RuntimeError, e:
2218 msg = "The fit failed, possibly because it didn't converge."
2219 if rcParams["verbose"]:
2220 asaplog.push(str(e))
2221 asaplog.push(str(msg))
2222 return
2223 else:
2224 raise RuntimeError(str(e)+'\n'+msg)
2225
2226
2227 def auto_sinusoid_baseline(self, insitu=None, mask=None, nwave=None, maxwavelength=None,
2228 clipthresh=None, clipniter=None, edge=None, threshold=None,
2229 chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
2230 """\
2231 Return a scan which has been baselined (all rows) with sinusoidal functions.
2232 Spectral lines are detected first using linefinder and masked out
2233 to avoid them affecting the baseline solution.
2234
2235 Parameters:
2236 insitu: if False a new scantable is returned.
2237 Otherwise, the scaling is done in-situ
2238 The default is taken from .asaprc (False)
2239             mask:       an optional mask retrieved from scantable
2240 nwave: the maximum wave number of sinusoids within
2241 maxwavelength * (spectral range).
2242 The default is 3 (i.e., sinusoids with wave
2243 number of 0(=constant), 1, 2, and 3 are
2244 used for fitting). Also it is possible to
2245 explicitly specify all the wave numbers to
2246 be used, by giving a list including them
2247 (e.g. [0,1,2,15,16]).
2248 maxwavelength: the longest sinusoidal wavelength. The
2249 default is 1.0 (unit: spectral range).
2250 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2251             clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2252             edge:       an optional number of channels to drop at
2253                         the edge of the spectrum. If only one value is
2254 specified, the same number will be dropped
2255 from both sides of the spectrum. Default
2256 is to keep all channels. Nested tuples
2257 represent individual edge selection for
2258 different IFs (a number of spectral channels
2259 can be different)
2260 threshold: the threshold used by line finder. It is
2261 better to keep it large as only strong lines
2262 affect the baseline solution.
2263             chan_avg_limit: a maximum number of consecutive spectral
2264 channels to average during the search of
2265 weak and broad lines. The default is no
2266 averaging (and no search for weak lines).
2267 If such lines can affect the fitted baseline
2268 (e.g. a high order polynomial is fitted),
2269 increase this parameter (usually values up
2270 to 8 are reasonable). Most users of this
2271 method should find the default value sufficient.
2272 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2273 plot the fit and the residual. In this each
2274                         individual fit has to be approved by typing 'y'
2275 or 'n'
2276 getresidual: if False, returns best-fit values instead of
2277 residual. (default is True)
2278 outlog: Output the coefficients of the best-fit
2279 function to logger (default is False)
2280 blfile: Name of a text file in which the best-fit
2281                         parameter values are to be written
2282 (default is "": no file/logger output)
2283
2284 Example:
2285 bscan = scan.auto_sinusoid_baseline(nwave=10, insitu=False)
2286
2287 Note:
2288 The best-fit parameter values output in logger and/or blfile are now
2289 based on specunit of 'channel'.
2290 """
2291
2292 varlist = vars()
2293
2294 if insitu is None: insitu = rcParams['insitu']
2295 if insitu:
2296 workscan = self
2297 else:
2298 workscan = self.copy()
2299
2300 nchan = workscan.nchan()
2301
2302 if mask is None: mask = [True for i in xrange(nchan)]
2303 if nwave is None: nwave = 3
2304 if maxwavelength is None: maxwavelength = 1.0
2305 if clipthresh is None: clipthresh = 3.0
2306 if clipniter is None: clipniter = 0
2307 if edge is None: edge = (0,0)
2308 if threshold is None: threshold = 3
2309 if chan_avg_limit is None: chan_avg_limit = 1
2310 if plot is None: plot = False
2311 if getresidual is None: getresidual = True
2312 if outlog is None: outlog = False
2313 if blfile is None: blfile = ""
2314
2315 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2316
2317 from asap.asaplinefind import linefinder
2318 from asap import _is_sequence_or_number as _is_valid
2319
2320 if isinstance(nwave, int):
2321 in_nwave = nwave
2322 nwave = []
2323 for i in xrange(in_nwave+1): nwave.append(i)
2324
2325 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
2326         individualedge = False
2327 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
2328
2329 if individualedge:
2330 for edgepar in edge:
2331 if not _is_valid(edgepar, int):
2332 raise ValueError, "Each element of the 'edge' tuple has \
2333 to be a pair of integers or an integer."
2334 else:
2335 if not _is_valid(edge, int):
2336 raise ValueError, "Parameter 'edge' has to be an integer or a \
2337 pair of integers specified as a tuple. \
2338 Nested tuples are allowed \
2339 to make individual selection for different IFs."
2340
2341 if len(edge) > 1:
2342 curedge = edge
2343 else:
2344 curedge = edge + edge
2345
2346 try:
2347 #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2348 if individualedge:
2349 curedge = []
2350 for i in xrange(len(edge)):
2351 curedge += edge[i]
2352
2353 workscan._auto_sinusoid_baseline(mask, nwave, maxwavelength, clipthresh, clipniter, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
2354
2355 workscan._add_history("auto_sinusoid_baseline", varlist)
2356
2357 if insitu:
2358 self._assign(workscan)
2359 else:
2360 return workscan
2361
2362 except RuntimeError, e:
2363 msg = "The fit failed, possibly because it didn't converge."
2364 if rcParams["verbose"]:
2365 asaplog.push(str(e))
2366 asaplog.push(str(msg))
2367 return
2368 else:
2369 raise RuntimeError(str(e)+'\n'+msg)
2370
2371
2372 @asaplog_post_dec
2373 def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2374 clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
2375 """\
2376 Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2377 Parameters:
2378 insitu: If False a new scantable is returned.
2379 Otherwise, the scaling is done in-situ
2380 The default is taken from .asaprc (False)
2381 mask: An optional mask
2382 npiece: Number of pieces. (default is 2)
2383 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2384             clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2385 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2386 plot the fit and the residual. In this each
2387                         individual fit has to be approved by typing 'y'
2388 or 'n'
2389             getresidual: if False, returns best-fit values instead of
2390 residual. (default is True)
2391 outlog: Output the coefficients of the best-fit
2392 function to logger (default is False)
2393 blfile: Name of a text file in which the best-fit
2394                         parameter values are to be written
2395 (default is "": no file/logger output)
2396
2397 Example:
2398 # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2399 # also with 3-sigma clipping, iteration up to 4 times
2400 bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2401
2402 Note:
2403 The best-fit parameter values output in logger and/or blfile are now
2404 based on specunit of 'channel'.
2405 """
2406
2407 varlist = vars()
2408
2409 if insitu is None: insitu = rcParams["insitu"]
2410 if insitu:
2411 workscan = self
2412 else:
2413 workscan = self.copy()
2414
2415 nchan = workscan.nchan()
2416
2417 if mask is None: mask = [True for i in xrange(nchan)]
2418 if npiece is None: npiece = 2
2419 if clipthresh is None: clipthresh = 3.0
2420 if clipniter is None: clipniter = 0
2421 if plot is None: plot = False
2422 if getresidual is None: getresidual = True
2423 if outlog is None: outlog = False
2424 if blfile is None: blfile = ""
2425
2426 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2427
2428 try:
2429 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2430 workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, outlog, blfile)
2431
2432 workscan._add_history("cspline_baseline", varlist)
2433
2434 if insitu:
2435 self._assign(workscan)
2436 else:
2437 return workscan
2438
2439 except RuntimeError, e:
2440 msg = "The fit failed, possibly because it didn't converge."
2441 if rcParams["verbose"]:
2442 asaplog.push(str(e))
2443 asaplog.push(str(msg))
2444 return
2445 else:
2446 raise RuntimeError(str(e)+'\n'+msg)
2447
2448
2449 def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None,
2450 clipniter=None, edge=None, threshold=None,
2451 chan_avg_limit=None, getresidual=None, plot=None, outlog=None, blfile=None):
2452 """\
2453 Return a scan which has been baselined (all rows) by cubic spline
2454 function (piecewise cubic polynomial).
2455 Spectral lines are detected first using linefinder and masked out
2456 to avoid them affecting the baseline solution.
2457
2458 Parameters:
2459 insitu: if False a new scantable is returned.
2460 Otherwise, the scaling is done in-situ
2461 The default is taken from .asaprc (False)
2462             mask:       an optional mask retrieved from scantable
2463 npiece: Number of pieces. (default is 2)
2464 clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2465             clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2466             edge:       an optional number of channels to drop at
2467                         the edge of the spectrum. If only one value is
2468 specified, the same number will be dropped
2469 from both sides of the spectrum. Default
2470 is to keep all channels. Nested tuples
2471 represent individual edge selection for
2472 different IFs (a number of spectral channels
2473 can be different)
2474 threshold: the threshold used by line finder. It is
2475 better to keep it large as only strong lines
2476 affect the baseline solution.
2477 chan_avg_limit:
2478                         a maximum number of consecutive spectral
2479 channels to average during the search of
2480 weak and broad lines. The default is no
2481 averaging (and no search for weak lines).
2482 If such lines can affect the fitted baseline
2483 (e.g. a high order polynomial is fitted),
2484 increase this parameter (usually values up
2485 to 8 are reasonable). Most users of this
2486 method should find the default value sufficient.
2487 plot: *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2488 plot the fit and the residual. In this each
2489                         individual fit has to be approved by typing 'y'
2490 or 'n'
2491             getresidual: if False, returns best-fit values instead of
2492 residual. (default is True)
2493 outlog: Output the coefficients of the best-fit
2494 function to logger (default is False)
2495 blfile: Name of a text file in which the best-fit
2496                         parameter values are to be written
2497 (default is "": no file/logger output)
2498
2499 Example:
2500 bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2501
2502 Note:
2503 The best-fit parameter values output in logger and/or blfile are now
2504 based on specunit of 'channel'.
2505 """
2506
2507 varlist = vars()
2508
2509 if insitu is None: insitu = rcParams['insitu']
2510 if insitu:
2511 workscan = self
2512 else:
2513 workscan = self.copy()
2514
2515 nchan = workscan.nchan()
2516
2517 if mask is None: mask = [True for i in xrange(nchan)]
2518 if npiece is None: npiece = 2
2519 if clipthresh is None: clipthresh = 3.0
2520 if clipniter is None: clipniter = 0
2521 if edge is None: edge = (0, 0)
2522 if threshold is None: threshold = 3
2523 if chan_avg_limit is None: chan_avg_limit = 1
2524 if plot is None: plot = False
2525 if getresidual is None: getresidual = True
2526 if outlog is None: outlog = False
2527 if blfile is None: blfile = ""
2528
2529 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2530
2531 from asap.asaplinefind import linefinder
2532 from asap import _is_sequence_or_number as _is_valid
2533
2534 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
2535         individualedge = False
2536 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
2537
2538 if individualedge:
2539 for edgepar in edge:
2540 if not _is_valid(edgepar, int):
2541 raise ValueError, "Each element of the 'edge' tuple has \
2542 to be a pair of integers or an integer."
2543 else:
2544 if not _is_valid(edge, int):
2545 raise ValueError, "Parameter 'edge' has to be an integer or a \
2546 pair of integers specified as a tuple. \
2547 Nested tuples are allowed \
2548 to make individual selection for different IFs."
2549
2550 if len(edge) > 1:
2551 curedge = edge
2552 else:
2553 curedge = edge + edge
2554
2555 try:
2556 #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2557 if individualedge:
2558 curedge = []
2559 for i in xrange(len(edge)):
2560 curedge += edge[i]
2561
2562 workscan._auto_cspline_baseline(mask, npiece, clipthresh, clipniter, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
2563
2564 workscan._add_history("auto_cspline_baseline", varlist)
2565
2566 if insitu:
2567 self._assign(workscan)
2568 else:
2569 return workscan
2570
2571 except RuntimeError, e:
2572 msg = "The fit failed, possibly because it didn't converge."
2573 if rcParams["verbose"]:
2574 asaplog.push(str(e))
2575 asaplog.push(str(msg))
2576 return
2577 else:
2578 raise RuntimeError(str(e)+'\n'+msg)
2579
2580
2581 @asaplog_post_dec
2582 def poly_baseline(self, insitu=None, mask=None, order=None, plot=None, getresidual=None, outlog=None, blfile=None):
2583 """\
2584 Return a scan which has been baselined (all rows) by a polynomial.
2585 Parameters:
2586 insitu: if False a new scantable is returned.
2587 Otherwise, the scaling is done in-situ
2588 The default is taken from .asaprc (False)
2589 mask: an optional mask
2590 order: the order of the polynomial (default is 0)
2591 plot: plot the fit and the residual. In this each
2592                         individual fit has to be approved by typing 'y'
2593 or 'n'
2594             getresidual: if False, returns best-fit values instead of
2595 residual. (default is True)
2596 outlog: Output the coefficients of the best-fit
2597 function to logger (default is False)
2598 blfile: Name of a text file in which the best-fit
2599                         parameter values are to be written
2600 (default is "": no file/logger output)
2601
2602 Example:
2603 # return a scan baselined by a third order polynomial,
2604 # not using a mask
2605 bscan = scan.poly_baseline(order=3)
2606 """
2607
2608 varlist = vars()
2609
2610 if insitu is None: insitu = rcParams["insitu"]
2611 if insitu:
2612 workscan = self
2613 else:
2614 workscan = self.copy()
2615
2616 nchan = workscan.nchan()
2617
2618 if mask is None: mask = [True for i in xrange(nchan)]
2619 if order is None: order = 0
2620 if plot is None: plot = False
2621 if getresidual is None: getresidual = True
2622 if outlog is None: outlog = False
2623 if blfile is None: blfile = ""
2624
2625 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2626
2627 try:
2628 rows = xrange(workscan.nrow())
2629
2630 #if len(rows) > 0: workscan._init_blinfo()
2631
2632 if plot:
2633 if outblfile: blf = open(blfile, "a")
2634
2635 f = fitter()
2636 f.set_function(lpoly=order)
2637 for r in rows:
2638 f.x = workscan._getabcissa(r)
2639 f.y = workscan._getspectrum(r)
2640 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
2641 f.data = None
2642 f.fit()
2643
2644 f.plot(residual=True)
2645 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2646 if accept_fit.upper() == "N":
2647 #workscan._append_blinfo(None, None, None)
2648 continue
2649
2650 blpars = f.get_parameters()
2651 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2652 #workscan._append_blinfo(blpars, masklist, f.mask)
2653 workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
2654
2655 if outblfile:
2656 rms = workscan.get_rms(f.mask, r)
2657 dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2658 blf.write(dataout)
2659
2660 f._p.unmap()
2661 f._p = None
2662
2663 if outblfile: blf.close()
2664 else:
2665 workscan._poly_baseline(mask, order, getresidual, outlog, blfile)
2666
2667 workscan._add_history("poly_baseline", varlist)
2668
2669 if insitu:
2670 self._assign(workscan)
2671 else:
2672 return workscan
2673
2674 except RuntimeError, e:
2675 msg = "The fit failed, possibly because it didn't converge."
2676 if rcParams["verbose"]:
2677 asaplog.push(str(e))
2678 asaplog.push(str(msg))
2679 return
2680 else:
2681 raise RuntimeError(str(e)+'\n'+msg)
2682
2683
2684 def auto_poly_baseline(self, insitu=None, mask=None, order=None, edge=None, threshold=None,
2685 chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
2686 """\
2687 Return a scan which has been baselined (all rows) by a polynomial.
2688 Spectral lines are detected first using linefinder and masked out
2689 to avoid them affecting the baseline solution.
2690
2691 Parameters:
2692 insitu: if False a new scantable is returned.
2693 Otherwise, the scaling is done in-situ
2694 The default is taken from .asaprc (False)
2695             mask:       an optional mask retrieved from scantable
2696 order: the order of the polynomial (default is 0)
2697             edge:       an optional number of channels to drop at
2698                         the edge of the spectrum. If only one value is
2699 specified, the same number will be dropped
2700 from both sides of the spectrum. Default
2701 is to keep all channels. Nested tuples
2702 represent individual edge selection for
2703 different IFs (a number of spectral channels
2704 can be different)
2705 threshold: the threshold used by line finder. It is
2706 better to keep it large as only strong lines
2707 affect the baseline solution.
2708 chan_avg_limit:
2709                         a maximum number of consecutive spectral
2710 channels to average during the search of
2711 weak and broad lines. The default is no
2712 averaging (and no search for weak lines).
2713 If such lines can affect the fitted baseline
2714 (e.g. a high order polynomial is fitted),
2715 increase this parameter (usually values up
2716 to 8 are reasonable). Most users of this
2717 method should find the default value sufficient.
2718 plot: plot the fit and the residual. In this each
2719                         individual fit has to be approved by typing 'y'
2720 or 'n'
2721             getresidual: if False, returns best-fit values instead of
2722 residual. (default is True)
2723 outlog: Output the coefficients of the best-fit
2724 function to logger (default is False)
2725 blfile: Name of a text file in which the best-fit
2726                         parameter values are to be written
2727 (default is "": no file/logger output)
2728
2729 Example:
2730 bscan = scan.auto_poly_baseline(order=7, insitu=False)
2731 """
2732
2733 varlist = vars()
2734
2735 if insitu is None: insitu = rcParams['insitu']
2736 if insitu:
2737 workscan = self
2738 else:
2739 workscan = self.copy()
2740
2741 nchan = workscan.nchan()
2742
2743 if mask is None: mask = [True for i in xrange(nchan)]
2744 if order is None: order = 0
2745 if edge is None: edge = (0, 0)
2746 if threshold is None: threshold = 3
2747 if chan_avg_limit is None: chan_avg_limit = 1
2748 if plot is None: plot = False
2749 if getresidual is None: getresidual = True
2750 if outlog is None: outlog = False
2751 if blfile is None: blfile = ""
2752
2753 outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2754
2755 from asap.asaplinefind import linefinder
2756 from asap import _is_sequence_or_number as _is_valid
2757
2758 if not (isinstance(edge, list) or isinstance(edge, tuple)): edge = [ edge ]
2759         individualedge = False
2760 if len(edge) > 1: individualedge = isinstance(edge[0], list) or isinstance(edge[0], tuple)
2761
2762 if individualedge:
2763 for edgepar in edge:
2764 if not _is_valid(edgepar, int):
2765 raise ValueError, "Each element of the 'edge' tuple has \
2766 to be a pair of integers or an integer."
2767 else:
2768 if not _is_valid(edge, int):
2769 raise ValueError, "Parameter 'edge' has to be an integer or a \
2770 pair of integers specified as a tuple. \
2771 Nested tuples are allowed \
2772 to make individual selection for different IFs."
2773
2774 if len(edge) > 1:
2775 curedge = edge
2776 else:
2777 curedge = edge + edge
2778
2779 try:
2780 rows = xrange(workscan.nrow())
2781
2782 #if len(rows) > 0: workscan._init_blinfo()
2783
2784 if plot:
2785 if outblfile: blf = open(blfile, "a")
2786
2787 fl = linefinder()
2788 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2789 fl.set_scan(workscan)
2790 f = fitter()
2791 f.set_function(lpoly=order)
2792
2793 for r in rows:
2794 if individualedge:
2795 if len(edge) <= workscan.getif(r):
2796                         raise RuntimeError, "Number of edge elements appears to " \
2797 "be less than the number of IFs"
2798 else:
2799 curedge = edge[workscan.getif(r)]
2800
2801 fl.find_lines(r, mask_and(mask, workscan._getmask(r)), curedge) # (CAS-1434)
2802
2803 f.x = workscan._getabcissa(r)
2804 f.y = workscan._getspectrum(r)
2805 f.mask = fl.get_mask()
2806 f.data = None
2807 f.fit()
2808
2809 f.plot(residual=True)
2810 accept_fit = raw_input("Accept fit ( [y]/n ): ")
2811 if accept_fit.upper() == "N":
2812 #workscan._append_blinfo(None, None, None)
2813 continue
2814
2815 blpars = f.get_parameters()
2816 masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2817 #workscan._append_blinfo(blpars, masklist, f.mask)
2818 workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
2819
2820 if outblfile:
2821 rms = workscan.get_rms(f.mask, r)
2822 dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2823 blf.write(dataout)
2824
2825 f._p.unmap()
2826 f._p = None
2827
2828 if outblfile: blf.close()
2829
2830 else:
2831 if individualedge:
2832 curedge = []
2833 for i in xrange(len(edge)):
2834 curedge += edge[i]
2835
2836 workscan._auto_poly_baseline(mask, order, curedge, threshold, chan_avg_limit, getresidual, outlog, blfile)
2837
2838 workscan._add_history("auto_poly_baseline", varlist)
2839
2840 if insitu:
2841 self._assign(workscan)
2842 else:
2843 return workscan
2844
2845 except RuntimeError, e:
2846 msg = "The fit failed, possibly because it didn't converge."
2847 if rcParams["verbose"]:
2848 asaplog.push(str(e))
2849 asaplog.push(str(msg))
2850 return
2851 else:
2852 raise RuntimeError(str(e)+'\n'+msg)
2853
2854
2855 ### OBSOLETE ##################################################################
2856 @asaplog_post_dec
2857 def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
2858 """
2859 Return a scan which has been baselined (all rows) by a polynomial.
2860
2861 Parameters:
2862
2863 mask: an optional mask
2864
2865 order: the order of the polynomial (default is 0)
2866
2867 plot: plot the fit and the residual. In this each
2868                          individual fit has to be approved by typing 'y'
2869 or 'n'
2870
2871 uselin: use linear polynomial fit
2872
2873 insitu: if False a new scantable is returned.
2874 Otherwise, the scaling is done in-situ
2875 The default is taken from .asaprc (False)
2876
2877 rows: row numbers of spectra to be processed.
2878 (default is None: for all rows)
2879
2880 Example:
2881 # return a scan baselined by a third order polynomial,
2882 # not using a mask
2883             bscan = scan.old_poly_baseline(order=3)
2884
2885 """
2886 if insitu is None: insitu = rcParams['insitu']
2887 if not insitu:
2888 workscan = self.copy()
2889 else:
2890 workscan = self
2891 varlist = vars()
2892 if mask is None:
2893 mask = [True for i in xrange(self.nchan())]
2894
2895 try:
2896 f = fitter()
2897 if uselin:
2898 f.set_function(lpoly=order)
2899 else:
2900 f.set_function(poly=order)
2901
2902 if rows == None:
2903 rows = xrange(workscan.nrow())
2904 elif isinstance(rows, int):
2905 rows = [ rows ]
2906
2907 if len(rows) > 0:
2908 self.blpars = []
2909 self.masklists = []
2910 self.actualmask = []
2911
2912 for r in rows:
2913 f.x = workscan._getabcissa(r)
2914 f.y = workscan._getspectrum(r)
2915 f.mask = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
2916 f.data = None
2917 f.fit()
2918 if plot:
2919 f.plot(residual=True)
2920 x = raw_input("Accept fit ( [y]/n ): ")
2921 if x.upper() == 'N':
2922 self.blpars.append(None)
2923 self.masklists.append(None)
2924 self.actualmask.append(None)
2925 continue
2926 workscan._setspectrum(f.fitter.getresidual(), r)
2927 self.blpars.append(f.get_parameters())
2928 self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
2929 self.actualmask.append(f.mask)
2930
2931 if plot:
2932 f._p.unmap()
2933 f._p = None
2934 workscan._add_history("poly_baseline", varlist)
2935 if insitu:
2936 self._assign(workscan)
2937 else:
2938 return workscan
2939 except RuntimeError:
2940 msg = "The fit failed, possibly because it didn't converge."
2941 raise RuntimeError(msg)
2942
2943 def _init_blinfo(self):
2944 """\
2945 Initialise the following three auxiliary members:
2946 blpars : parameters of the best-fit baseline,
2947 masklists : mask data (edge positions of masked channels) and
2948 actualmask : mask data (in boolean list),
2949 to keep for use later (including output to logger/text files).
2950 Used by poly_baseline() and auto_poly_baseline() in case of
2951 'plot=True'.
2952 """
2953 self.blpars = []
2954 self.masklists = []
2955 self.actualmask = []
2956 return
2957
2958 def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2959 """\
2960 Append baseline-fitting related info to blpars, masklist and
2961 actualmask.
2962 """
2963 self.blpars.append(data_blpars)
2964 self.masklists.append(data_masklists)
2965 self.actualmask.append(data_actualmask)
2966 return
2967
2968 @asaplog_post_dec
2969 def rotate_linpolphase(self, angle):
2970 """\
2971 Rotate the phase of the complex polarization O=Q+iU correlation.
2972 This is always done in situ in the raw data. So if you call this
2973 function more than once then each call rotates the phase further.
2974
2975 Parameters:
2976
2977 angle: The angle (degrees) to rotate (add) by.
2978
2979 Example::
2980
2981 scan.rotate_linpolphase(2.3)
2982
2983 """
2984 varlist = vars()
2985 self._math._rotate_linpolphase(self, angle)
2986 self._add_history("rotate_linpolphase", varlist)
2987 return
2988
2989 @asaplog_post_dec
2990 def rotate_xyphase(self, angle):
2991 """\
2992 Rotate the phase of the XY correlation. This is always done in situ
2993 in the data. So if you call this function more than once
2994 then each call rotates the phase further.
2995
2996 Parameters:
2997
2998 angle: The angle (degrees) to rotate (add) by.
2999
3000 Example::
3001
3002 scan.rotate_xyphase(2.3)
3003
3004 """
3005 varlist = vars()
3006 self._math._rotate_xyphase(self, angle)
3007 self._add_history("rotate_xyphase", varlist)
3008 return
3009
3010 @asaplog_post_dec
3011 def swap_linears(self):
3012 """\
3013         Swap the linear polarisations XX and YY, or more generally the first
3014         two polarisations, as this also works for circulars.
3015 """
3016 varlist = vars()
3017 self._math._swap_linears(self)
3018 self._add_history("swap_linears", varlist)
3019 return
3020
3021 @asaplog_post_dec
3022 def invert_phase(self):
3023 """\
3024 Invert the phase of the complex polarisation
3025 """
3026 varlist = vars()
3027 self._math._invert_phase(self)
3028 self._add_history("invert_phase", varlist)
3029 return
3030
3031 @asaplog_post_dec
3032 def add(self, offset, insitu=None):
3033 """\
3034 Return a scan where all spectra have the offset added
3035
3036 Parameters:
3037
3038 offset: the offset
3039
3040 insitu: if False a new scantable is returned.
3041 Otherwise, the scaling is done in-situ
3042 The default is taken from .asaprc (False)
3043
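        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable and the offset is arbitrary)::

            # add a constant offset of 1.5 to every spectrum
            shifted = scan.add(1.5, insitu=False)
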
3044 """
3045 if insitu is None: insitu = rcParams['insitu']
3046 self._math._setinsitu(insitu)
3047 varlist = vars()
3048 s = scantable(self._math._unaryop(self, offset, "ADD", False))
3049 s._add_history("add", varlist)
3050 if insitu:
3051 self._assign(s)
3052 else:
3053 return s
3054
3055 @asaplog_post_dec
3056 def scale(self, factor, tsys=True, insitu=None):
3057 """\
3058
3059 Return a scan where all spectra are scaled by the given 'factor'
3060
3061 Parameters:
3062
3063 factor: the scaling factor (float or 1D float list)
3064
3065 insitu: if False a new scantable is returned.
3066 Otherwise, the scaling is done in-situ
3067 The default is taken from .asaprc (False)
3068
3069 tsys: if True (default) then apply the operation to Tsys
3070 as well as the data
3071
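        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable and the factor is arbitrary)::

            # scale the data (and Tsys) by a factor of 2.0
            doubled = scan.scale(2.0, tsys=True, insitu=False)
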
3072 """
3073 if insitu is None: insitu = rcParams['insitu']
3074 self._math._setinsitu(insitu)
3075 varlist = vars()
3076 s = None
3077 import numpy
3078 if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3079 if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3080 from asapmath import _array2dOp
3081 s = _array2dOp( self.copy(), factor, "MUL", tsys )
3082 else:
3083 s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3084 else:
3085 s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
3086 s._add_history("scale", varlist)
3087 if insitu:
3088 self._assign(s)
3089 else:
3090 return s
3091
3092 def set_sourcetype(self, match, matchtype="pattern",
3093 sourcetype="reference"):
3094 """\
3095         Set the type of the source to be a source or reference scan
3096 using the provided pattern.
3097
3098 Parameters:
3099
3100 match: a Unix style pattern, regular expression or selector
3101
3102 matchtype: 'pattern' (default) UNIX style pattern or
3103 'regex' regular expression
3104
3105 sourcetype: the type of the source to use (source/reference)
3106
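        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable whose reference scans have names ending in '_R')::

            # flag every scan matching '*_R' as a reference scan
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")
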
3107 """
3108 varlist = vars()
3109 basesel = self.get_selection()
3110 stype = -1
3111 if sourcetype.lower().startswith("r"):
3112 stype = 1
3113 elif sourcetype.lower().startswith("s"):
3114 stype = 0
3115 else:
3116             raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
3117 if matchtype.lower().startswith("p"):
3118 matchtype = "pattern"
3119 elif matchtype.lower().startswith("r"):
3120 matchtype = "regex"
3121 else:
3122 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3123 sel = selector()
3124 if isinstance(match, selector):
3125 sel = match
3126 else:
3127 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3128 self.set_selection(basesel+sel)
3129 self._setsourcetype(stype)
3130 self.set_selection(basesel)
3131 self._add_history("set_sourcetype", varlist)
3132
3133 @asaplog_post_dec
3134 @preserve_selection
3135 def auto_quotient(self, preserve=True, mode='paired', verify=False):
3136 """\
3137         This function allows you to build quotients automatically.
3138         It assumes the observation contains the same number of
3139         "ons" and "offs".
3140
3141 Parameters:
3142
3143 preserve: you can preserve (default) the continuum or
3144 remove it. The equations used are
3145
3146 preserve: Output = Toff * (on/off) - Toff
3147
3148 remove: Output = Toff * (on/off) - Ton
3149
3150 mode: the on/off detection mode
3151 'paired' (default)
3152 identifies 'off' scans by the
3153 trailing '_R' (Mopra/Parkes) or
3154 '_e'/'_w' (Tid) and matches
3155 on/off pairs from the observing pattern
3156 'time'
3157 finds the closest off in time
3158
3159 .. todo:: verify argument is not implemented
3160
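        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing scantable with matching on/off scans)::

            # form quotients, keeping the continuum, matching offs by time
            q = scan.auto_quotient(preserve=True, mode='time')
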
3161 """
3162 varlist = vars()
3163 modes = ["time", "paired"]
3164 if not mode in modes:
3165 msg = "please provide valid mode. Valid modes are %s" % (modes)
3166 raise ValueError(msg)
3167 s = None
3168 if mode.lower() == "paired":
3169 sel = self.get_selection()
3170 sel.set_query("SRCTYPE==psoff")
3171 self.set_selection(sel)
3172 offs = self.copy()
3173 sel.set_query("SRCTYPE==pson")
3174 self.set_selection(sel)
3175 ons = self.copy()
3176 s = scantable(self._math._quotient(ons, offs, preserve))
3177 elif mode.lower() == "time":
3178 s = scantable(self._math._auto_quotient(self, mode, preserve))
3179 s._add_history("auto_quotient", varlist)
3180 return s
3181
3182 @asaplog_post_dec
3183 def mx_quotient(self, mask = None, weight='median', preserve=True):
3184 """\
3185 Form a quotient using "off" beams when observing in "MX" mode.
3186
3187 Parameters:
3188
3189 mask: an optional mask to be used when weight == 'stddev'
3190
3191 weight: How to average the off beams. Default is 'median'.
3192
3193 preserve: you can preserve (default) the continuum or
3194 remove it. The equations used are:
3195
3196 preserve: Output = Toff * (on/off) - Toff
3197
3198 remove: Output = Toff * (on/off) - Ton
3199
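        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing MX-mode scantable)::

            # form the quotient using median-averaged off beams
            q = scan.mx_quotient(weight='median', preserve=True)
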
3200 """
3201 mask = mask or ()
3202 varlist = vars()
3203 on = scantable(self._math._mx_extract(self, 'on'))
3204 preoff = scantable(self._math._mx_extract(self, 'off'))
3205 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3206 from asapmath import quotient
3207 q = quotient(on, off, preserve)
3208 q._add_history("mx_quotient", varlist)
3209 return q
3210
3211 @asaplog_post_dec
3212 def freq_switch(self, insitu=None):
3213 """\
3214 Apply frequency switching to the data.
3215
3216 Parameters:
3217
3218 insitu: if False a new scantable is returned.
3219                     Otherwise, the switching is done in-situ
3220 The default is taken from .asaprc (False)
3221
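        Example (a minimal usage sketch; ``scan`` is assumed to be an
        existing frequency-switched scantable)::

            # apply frequency switching; return a new scantable
            fs = scan.freq_switch(insitu=False)
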
3222 """
3223 if insitu is None: insitu = rcParams['insitu']
3224 self._math._setinsitu(insitu)
3225 varlist = vars()
3226 s = scantable(self._math._freqswitch(self))
3227 s._add_history("freq_switch", varlist)
3228 if insitu:
3229 self._assign(s)
3230 else:
3231 return s
3232
3233 @asaplog_post_dec
3234 def recalc_azel(self):
3235 """Recalculate the azimuth and elevation for each position."""
3236 varlist = vars()
3237 self._recalcazel()
3238 self._add_history("recalc_azel", varlist)
3239 return
3240
3241 @asaplog_post_dec
3242 def __add__(self, other):
3243 varlist = vars()
3244 s = None
3245 if isinstance(other, scantable):
3246 s = scantable(self._math._binaryop(self, other, "ADD"))
3247 elif isinstance(other, float):
3248 s = scantable(self._math._unaryop(self, other, "ADD", False))
3249 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3250 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3251 from asapmath import _array2dOp
3252 s = _array2dOp( self.copy(), other, "ADD", False )
3253 else:
3254 s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3255 else:
3256 raise TypeError("Other input is not a scantable or float value")
3257 s._add_history("operator +", varlist)
3258 return s
3259
3260 @asaplog_post_dec
3261 def __sub__(self, other):
3262 """
3263 implicit on all axes and on Tsys
3264 """
3265 varlist = vars()
3266 s = None
3267 if isinstance(other, scantable):
3268 s = scantable(self._math._binaryop(self, other, "SUB"))
3269 elif isinstance(other, float):
3270 s = scantable(self._math._unaryop(self, other, "SUB", False))
3271 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3272 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3273 from asapmath import _array2dOp
3274 s = _array2dOp( self.copy(), other, "SUB", False )
3275 else:
3276 s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
3277 else:
3278 raise TypeError("Other input is not a scantable or float value")
3279 s._add_history("operator -", varlist)
3280 return s
3281
3282 @asaplog_post_dec
3283 def __mul__(self, other):
3284 """
3285 implicit on all axes and on Tsys
3286 """
3287 varlist = vars()
3288 s = None
3289 if isinstance(other, scantable):
3290 s = scantable(self._math._binaryop(self, other, "MUL"))
3291 elif isinstance(other, float):
3292 s = scantable(self._math._unaryop(self, other, "MUL", False))
3293 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3294 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3295 from asapmath import _array2dOp
3296 s = _array2dOp( self.copy(), other, "MUL", False )
3297 else:
3298 s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
3299 else:
3300 raise TypeError("Other input is not a scantable or float value")
3301 s._add_history("operator *", varlist)
3302 return s
3303
3304
3305 @asaplog_post_dec
3306 def __div__(self, other):
3307 """
3308 implicit on all axes and on Tsys
3309 """
3310 varlist = vars()
3311 s = None
3312 if isinstance(other, scantable):
3313 s = scantable(self._math._binaryop(self, other, "DIV"))
3314 elif isinstance(other, float):
3315 if other == 0.0:
3316 raise ZeroDivisionError("Dividing by zero is not recommended")
3317 s = scantable(self._math._unaryop(self, other, "DIV", False))
3318 elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3319 if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3320 from asapmath import _array2dOp
3321 s = _array2dOp( self.copy(), other, "DIV", False )
3322 else:
3323 s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
3324 else:
3325 raise TypeError("Other input is not a scantable or float value")
3326 s._add_history("operator /", varlist)
3327 return s
3328
3329 @asaplog_post_dec
3330 def get_fit(self, row=0):
3331 """\
3332 Print or return the stored fits for a row in the scantable
3333
3334 Parameters:
3335
3336 row: the row which the fit has been applied to.
3337
3338 """
3339         if row >= self.nrow():
3340 return
3341 from asap.asapfit import asapfit
3342 fit = asapfit(self._getfit(row))
3343 asaplog.push( '%s' %(fit) )
3344 return fit.as_dict()
3345
3346 def flag_nans(self):
3347 """\
3348 Utility function to flag NaN values in the scantable.
3349 """
3350 import numpy
3351 basesel = self.get_selection()
3352 for i in range(self.nrow()):
3353 sel = self.get_row_selector(i)
3354 self.set_selection(basesel+sel)
3355 nans = numpy.isnan(self._getspectrum(0))
3356 if numpy.any(nans):
3357 bnans = [ bool(v) for v in nans]
3358 self.flag(bnans)
3359 self.set_selection(basesel)
3360
3361 def get_row_selector(self, rowno):
3362 #return selector(beams=self.getbeam(rowno),
3363 # ifs=self.getif(rowno),
3364 # pols=self.getpol(rowno),
3365 # scans=self.getscan(rowno),
3366 # cycles=self.getcycle(rowno))
3367 return selector(rows=[rowno])
3368
3369 def _add_history(self, funcname, parameters):
3370 if not rcParams['scantable.history']:
3371 return
3372 # create date
3373 sep = "##"
3374 from datetime import datetime
3375 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3376 hist = dstr+sep
3377 hist += funcname+sep#cdate+sep
3378 if parameters.has_key('self'): del parameters['self']
3379 for k, v in parameters.iteritems():
3380 if type(v) is dict:
3381 for k2, v2 in v.iteritems():
3382 hist += k2
3383 hist += "="
3384 if isinstance(v2, scantable):
3385 hist += 'scantable'
3386 elif k2 == 'mask':
3387 if isinstance(v2, list) or isinstance(v2, tuple):
3388 hist += str(self._zip_mask(v2))
3389 else:
3390 hist += str(v2)
3391 else:
3392 hist += str(v2)
3393 else:
3394 hist += k
3395 hist += "="
3396 if isinstance(v, scantable):
3397 hist += 'scantable'
3398 elif k == 'mask':
3399 if isinstance(v, list) or isinstance(v, tuple):
3400 hist += str(self._zip_mask(v))
3401 else:
3402 hist += str(v)
3403 else:
3404 hist += str(v)
3405 hist += sep
3406 hist = hist[:-2] # remove trailing '##'
3407 self._addhistory(hist)
3408
3409
3410 def _zip_mask(self, mask):
3411 mask = list(mask)
3412 i = 0
3413 segments = []
3414 while mask[i:].count(1):
3415 i += mask[i:].index(1)
3416 if mask[i:].count(0):
3417 j = i + mask[i:].index(0)
3418 else:
3419 j = len(mask)
3420 segments.append([i, j])
3421 i = j
3422 return segments
3423
3424 def _get_ordinate_label(self):
3425 fu = "("+self.get_fluxunit()+")"
3426 import re
3427 lbl = "Intensity"
3428 if re.match(".K.", fu):
3429 lbl = "Brightness Temperature "+ fu
3430 elif re.match(".Jy.", fu):
3431 lbl = "Flux density "+ fu
3432 return lbl
3433
3434 def _check_ifs(self):
3435 #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3436 nchans = [self.nchan(i) for i in self.getifnos()]
3437 nchans = filter(lambda t: t > 0, nchans)
3438 return (sum(nchans)/len(nchans) == nchans[0])
3439
3440 @asaplog_post_dec
3441 #def _fill(self, names, unit, average, getpt, antenna):
3442 def _fill(self, names, unit, average, opts={}):
3443 first = True
3444 fullnames = []
3445 for name in names:
3446 name = os.path.expandvars(name)
3447 name = os.path.expanduser(name)
3448 if not os.path.exists(name):
3449                 msg = "File '%s' does not exist" % (name)
3450 raise IOError(msg)
3451 fullnames.append(name)
3452 if average:
3453 asaplog.push('Auto averaging integrations')
3454 stype = int(rcParams['scantable.storage'].lower() == 'disk')
3455 for name in fullnames:
3456 tbl = Scantable(stype)
3457 if is_ms( name ):
3458 r = msfiller( tbl )
3459 else:
3460 r = filler( tbl )
3461 rx = rcParams['scantable.reference']
3462 r.setreferenceexpr(rx)
3463 #r = filler(tbl)
3464 #rx = rcParams['scantable.reference']
3465 #r.setreferenceexpr(rx)
3466 msg = "Importing %s..." % (name)
3467 asaplog.push(msg, False)
3468 #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3469 r.open(name, opts)# antenna, -1, -1, getpt)
3470 r.fill()
3471 if average:
3472 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3473 if not first:
3474 tbl = self._math._merge([self, tbl])
3475 Scantable.__init__(self, tbl)
3476 r.close()
3477 del r, tbl
3478 first = False
3479 #flush log
3480 asaplog.post()
3481 if unit is not None:
3482 self.set_fluxunit(unit)
3483 if not is_casapy():
3484 self.set_freqframe(rcParams['scantable.freqframe'])
3485
3486
3487 def __getitem__(self, key):
3488 if key < 0:
3489 key += self.nrow()
3490 if key >= self.nrow():
3491 raise IndexError("Row index out of range.")
3492 return self._getspectrum(key)
3493
3494 def __setitem__(self, key, value):
3495 if key < 0:
3496 key += self.nrow()
3497 if key >= self.nrow():
3498 raise IndexError("Row index out of range.")
3499 if not hasattr(value, "__len__") or \
3500 len(value) > self.nchan(self.getif(key)):
3501 raise ValueError("Spectrum length doesn't match.")
3502 return self._setspectrum(value, key)
3503
3504 def __len__(self):
3505 return self.nrow()
3506
3507 def __iter__(self):
3508 for i in range(len(self)):
3509 yield self[i]