source: trunk/python/scantable.py @ 1938

Last change on this file since 1938 was 1938, checked in by Malte Marquarding, 14 years ago

merge from branches/asap4casa3.1.0. Should be done the other way

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.3 KB
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
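    """Decorator that restores the scantable's original selection after
    the wrapped function has run, even if it raises.

    A minimal, hypothetical usage sketch ('do_work' is a placeholder)::

        @preserve_selection
        def do_work(scan):
            scan.set_selection(selector(scans=[0]))
            # ... operate on scan 0 only; the previous selection is
            # re-applied automatically when do_work returns
    """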
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which makes loading
110                          the MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection: an integer (id) or string (name or id).
114
115            parallactify: Indicate that the data have been parallactified.
116                          The default is taken from the .asaprc file.
117
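        Example (an illustrative sketch; the file name is a placeholder)::

            # create a scantable from a file on disk, averaging the
            # integrations within each scan on read
            s = scantable('mydata.rpf', average=True)
            # make a reference to an existing scantable
            s2 = scantable(s)
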
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the output file. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   If True, overwrite the file if it exists.
209                         The default (False) is to return with a warning
210                         without writing the output. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable where the specified scan number(s) has(have)
257        been dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
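        Example (illustrative)::

            # return a copy of the scantable without scans 2 and 3
            newscan = scan.drop_scan([2, 3])
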
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'str', 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to
340                         Default - no file output
341
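        Example (illustrative; the file name is a placeholder)::

            scan.summary()                    # print to the screen/logger
            scan.summary('scan_summary.txt')  # also write to a text file
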
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename == "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for the current row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for the current row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for the current row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
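        Example (an illustrative sketch, assuming a scantable called 'scan')::

            spec = scan.get_spectrum(0)
            # scale the spectrum; the new list must have nchan() elements
            scan.set_spectrum([2.0 * v for v in spec], 0)
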
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default: unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])    # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    operation is done in-situ. The default is taken from .asaprc
473                    (False)
474
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol.
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example:
520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. pos=0
619
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol.
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=msk)
644
645        """
646        return self.stats(stat='stddev', mask=mask);
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
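        Example (illustrative)::

            # weather values of the first integration
            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure'], w['humidity']
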
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row)
735
736
737    def get_time(self, row=-1, asdatetime=False):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object or a string (default) for each
741        integration time stamp in the scantable.
742
743        Parameters:
744
745            row:          row no of integration. Default -1 return all rows
746
747            asdatetime:   return values as datetime objects rather than strings
748
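        Example (illustrative)::

            # all integration time stamps as datetime objects
            times = scan.get_time(asdatetime=True)
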
749        """
750        from time import strptime
751        from datetime import datetime
752        times = self._get_column(self._gettime, row)
753        if not asdatetime:
754            return times
755        format = "%Y/%m/%d/%H:%M:%S"
756        if isinstance(times, list):
757            return [datetime(*strptime(i, format)[:6]) for i in times]
758        else:
759            return datetime(*strptime(times, format)[:6])
760
761
762    def get_inttime(self, row=-1):
763        """\
764        Get a list of integration times for the observations.
765        Return a time in seconds for each integration in the scantable.
766
767        Parameters:
768
769            row:    row no of integration. Default -1 return all rows.
770
771        """
772        return self._get_column(self._getinttime, row)
773
774
775    def get_sourcename(self, row=-1):
776        """\
777        Get a list of source names for the observations.
778        Return a string for each integration in the scantable.

779        Parameters:
780
781            row:    row no of integration. Default -1 return all rows.
782
783        """
784        return self._get_column(self._getsourcename, row)
785
786    def get_elevation(self, row=-1):
787        """\
788        Get a list of elevations for the observations.
789        Return a float for each integration in the scantable.
790
791        Parameters:
792
793            row:    row no of integration. Default -1 return all rows.
794
795        """
796        return self._get_column(self._getelevation, row)
797
798    def get_azimuth(self, row=-1):
799        """\
800        Get a list of azimuths for the observations.
801        Return a float for each integration in the scantable.
802
803        Parameters:
804            row:    row no of integration. Default -1 return all rows.
805
806        """
807        return self._get_column(self._getazimuth, row)
808
809    def get_parangle(self, row=-1):
810        """\
811        Get a list of parallactic angles for the observations.
812        Return a float for each integration in the scantable.
813
814        Parameters:
815
816            row:    row no of integration. Default -1 return all rows.
817
818        """
819        return self._get_column(self._getparangle, row)
820
821    def get_direction(self, row=-1):
822        """
823        Get a list of Positions on the sky (direction) for the observations.
824        Return a string for each integration in the scantable.
825
826        Parameters:
827
828            row:    row no of integration. Default -1 return all rows
829
830        """
831        return self._get_column(self._getdirection, row)
832
833    def get_directionval(self, row=-1):
834        """\
835        Get a list of Positions on the sky (direction) for the observations.
836        Return a float for each integration in the scantable.
837
838        Parameters:
839
840            row:    row no of integration. Default -1 return all rows
841
842        """
843        return self._get_column(self._getdirectionvec, row)
844
845    @asaplog_post_dec
846    def set_unit(self, unit='channel'):
847        """\
848        Set the unit for all following operations on this scantable
849
850        Parameters:
851
852            unit:    optional unit, default is 'channel'. Use one of '*Hz',
853                     'km/s', 'channel' or equivalent ''
854
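        Example (illustrative)::

            scan.set_unit('km/s')      # x-axis in velocity
            scan.set_unit('channel')   # back to channels
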
855        """
856        varlist = vars()
857        if unit in ['', 'pixel', 'channel']:
858            unit = ''
859        inf = list(self._getcoordinfo())
860        inf[0] = unit
861        self._setcoordinfo(inf)
862        self._add_history("set_unit", varlist)
863
864    @asaplog_post_dec
865    def set_instrument(self, instr):
866        """\
867        Set the instrument for subsequent processing.
868
869        Parameters:
870
871            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
872                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
873
874        """
875        self._setInstrument(instr)
876        self._add_history("set_instrument", vars())
877
878    @asaplog_post_dec
879    def set_feedtype(self, feedtype):
880        """\
881        Overwrite the feed type, which might not be set correctly.
882
883        Parameters:
884
885            feedtype:     'linear' or 'circular'
886
887        """
888        self._setfeedtype(feedtype)
889        self._add_history("set_feedtype", vars())
890
891    @asaplog_post_dec
892    def set_doppler(self, doppler='RADIO'):
893        """\
894        Set the doppler for all following operations on this scantable.
895
896        Parameters:
897
898            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
899
900        """
901        varlist = vars()
902        inf = list(self._getcoordinfo())
903        inf[2] = doppler
904        self._setcoordinfo(inf)
905        self._add_history("set_doppler", vars())
906
907    @asaplog_post_dec
908    def set_freqframe(self, frame=None):
909        """\
910        Set the frame type of the Spectral Axis.
911
912        Parameters:
913
914            frame:   an optional frame type, default 'LSRK'. Valid frames are:
915                     'TOPO', 'LSRD', 'LSRK', 'BARY',
916                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
917
918        Example::
919
920            scan.set_freqframe('BARY')
921
922        """
923        frame = frame or rcParams['scantable.freqframe']
924        varlist = vars()
925        # "REST" is not implemented in casacore
926        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
927        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
928        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
929                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
930
931        if frame in valid:
932            inf = list(self._getcoordinfo())
933            inf[1] = frame
934            self._setcoordinfo(inf)
935            self._add_history("set_freqframe", varlist)
936        else:
937            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
938            raise TypeError(msg)
939
940    @asaplog_post_dec
941    def set_dirframe(self, frame=""):
942        """\
943        Set the frame type of the Direction on the sky.
944
945        Parameters:
946
947            frame:   an optional frame type, default ''. Valid frames are:
948                     'J2000', 'B1950', 'GALACTIC'
949
950        Example:
951
952            scan.set_dirframe('GALACTIC')
953
954        """
955        varlist = vars()
956        Scantable.set_dirframe(self, frame)
957        self._add_history("set_dirframe", varlist)
958
959    def get_unit(self):
960        """\
961        Get the default unit set in this scantable
962
963        Returns:
964
965            A unit string
966
967        """
968        inf = self._getcoordinfo()
969        unit = inf[0]
970        if unit == '': unit = 'channel'
971        return unit
972
973    @asaplog_post_dec
974    def get_abcissa(self, rowno=0):
975        """\
976        Get the abcissa in the current coordinate setup for the currently
977        selected Beam/IF/Pol
978
979        Parameters:
980
981            rowno:    an optional row number in the scantable. Default is the
982                      first row, i.e. rowno=0
983
984        Returns:
985
986            The abcissa values and the format string (as a dictionary)
987
988        """
989        abc = self._getabcissa(rowno)
990        lbl = self._getabcissalabel(rowno)
991        return abc, lbl
992
993    @asaplog_post_dec
994    def flag(self, mask=None, unflag=False):
995        """\
996        Flag the selected data using an optional channel mask.
997
998        Parameters:
999
1000            mask:   an optional channel mask, created with create_mask. Default
1001                    (no mask) is all channels.
1002
1003            unflag:    if True, unflag the data
1004
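        Example (an illustrative sketch)::

            # flag channels 0-99 of the currently selected data
            msk = scan.create_mask([0, 99])
            scan.flag(mask=msk)
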
1005        """
1006        varlist = vars()
1007        mask = mask or []
1008        self._flag(mask, unflag)
1009        self._add_history("flag", varlist)
1010
1011    @asaplog_post_dec
1012    def flag_row(self, rows=[], unflag=False):
1013        """\
1014        Flag the selected data in row-based manner.
1015
1016        Parameters:
1017
1018            rows:   list of row numbers to be flagged. Default is no row
1019                    (must be explicitly specified to execute row-based flagging).
1020
1021            unflag: if True, unflag the data.
1022
1023        """
1024        varlist = vars()
1025        self._flag_row(rows, unflag)
1026        self._add_history("flag_row", varlist)
1027
1028    @asaplog_post_dec
1029    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1030        """\
1031        Flag the selected data outside a specified range (channel-based).
1032
1033        Parameters:
1034
1035            uthres:      upper threshold.
1036
1037            dthres:      lower threshold
1038
1039            clipoutside: True for flagging data outside the range [dthres:uthres].
1040                         False for flagging data inside the range.
1041
1042            unflag:      if True, unflag the data.
1043
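        Example (illustrative; the thresholds are placeholders)::

            # flag all channel values outside the range [-1.0, 3.0]
            scan.clip(uthres=3.0, dthres=-1.0)
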
1044        """
1045        varlist = vars()
1046        self._clip(uthres, dthres, clipoutside, unflag)
1047        self._add_history("clip", varlist)
1048
1049    @asaplog_post_dec
1050    def lag_flag(self, start, end, unit="MHz", insitu=None):
1051        """\
1052        Flag the data in 'lag' space by providing a frequency to remove.
1053        Flagged data in the scantable gets interpolated over the region.
1054        No taper is applied.
1055
1056        Parameters:
1057
1058            start:    the start frequency (really a period within the
1059                      bandwidth)  or period to remove
1060
1061            end:      the end frequency or period to remove
1062
1063            unit:     the frequency unit (default "MHz") or "" for
1064                      explicit lag channels
1065
1066        *Notes*:
1067
1068            It is recommended to flag edges of the band or strong
1069            signals beforehand.
1070
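        Example (illustrative; the values are placeholders)::

            # interpolate over a ripple with a period of 0.2-0.25 MHz,
            # returning a new scantable
            cleaned = scan.lag_flag(0.2, 0.25, unit='MHz', insitu=False)
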
1071        """
1072        if insitu is None: insitu = rcParams['insitu']
1073        self._math._setinsitu(insitu)
1074        varlist = vars()
1075        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1076        if not (unit == "" or base.has_key(unit)):
1077            raise ValueError("%s is not a valid unit." % unit)
1078        if unit == "":
1079            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1080        else:
1081            s = scantable(self._math._lag_flag(self, start*base[unit],
1082                                               end*base[unit], "frequency"))
1083        s._add_history("lag_flag", varlist)
1084        if insitu:
1085            self._assign(s)
1086        else:
1087            return s
1088
1089    @asaplog_post_dec
1090    def create_mask(self, *args, **kwargs):
1091        """\
1092        Compute and return a mask based on [min, max] windows.
1093        The specified windows are to be INCLUDED, when the mask is
1094        applied.
1095
1096        Parameters:
1097
1098            [min, max], [min2, max2], ...
1099                Pairs of start/end points (inclusive) specifying the regions
1100                to be masked
1101
1102            invert:     optional argument. If specified as True,
1103                        return an inverted mask, i.e. the regions
1104                        specified are EXCLUDED
1105
1106            row:        create the mask using the specified row for
1107                        unit conversions, default is row=0
1108                        only necessary if frequency varies over rows.
1109
1110        Examples::
1111
1112            scan.set_unit('channel')
1113            # a)
1114            msk = scan.create_mask([400, 500], [800, 900])
1115            # masks everything outside 400 and 500
1116            # and 800 and 900 in the unit 'channel'
1117
1118            # b)
1119            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1120            # masks the regions between 400 and 500
1121            # and 800 and 900 in the unit 'channel'
1122
1123            # c)
1124            #mask only channel 400
1125            msk =  scan.create_mask([400])
1126
1127        """
1128        row = kwargs.get("row", 0)
1129        data = self._getabcissa(row)
1130        u = self._getcoordinfo()[0]
1131        if u == "":
1132            u = "channel"
1133        msg = "The current mask window unit is %s" % u
1134        i = self._check_ifs()
1135        if not i:
1136            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1137        asaplog.push(msg)
1138        n = self.nchan()
1139        msk = _n_bools(n, False)
1140        # test if args is a 'list' or a 'normal *args - UGLY!!!
1141
1142        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1143             and args or args[0]
1144        for window in ws:
1145            if len(window) == 1:
1146                window = [window[0], window[0]]
1147            if len(window) == 0 or  len(window) > 2:
1148                raise ValueError("A window needs to be defined as [start(, end)]")
1149            if window[0] > window[1]:
1150                tmp = window[0]
1151                window[0] = window[1]
1152                window[1] = tmp
1153            for i in range(n):
1154                if data[i] >= window[0] and data[i] <= window[1]:
1155                    msk[i] = True
1156        if kwargs.has_key('invert'):
1157            if kwargs.get('invert'):
1158                msk = mask_not(msk)
1159        return msk
1160
1161    def get_masklist(self, mask=None, row=0, silent=False):
1162        """\
1163        Compute and return a list of mask windows, [min, max].
1164
1165        Parameters:
1166
1167            mask:       channel mask, created with create_mask.
1168
1169            row:        calculate the masklist using the specified row
1170                        for unit conversions, default is row=0
1171                        only necessary if frequency varies over rows.
1172
1173        Returns:
1174
1175            [min, max], [min2, max2], ...
1176                Pairs of start/end points (inclusive) specifying
1177                the masked regions
1178
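        Example (an illustrative sketch)::

            msk = scan.create_mask([400, 500], [800, 900])
            # recover the [min, max] windows from the channel mask
            windows = scan.get_masklist(msk)
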
1179        """
1180        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1181            raise TypeError("The mask should be list or tuple.")
1182        if len(mask) < 2:
1183            raise TypeError("The mask should have more than one element")
1184        if self.nchan() != len(mask):
1185            msg = "Number of channels in scantable != number of mask elements"
1186            raise TypeError(msg)
1187        data = self._getabcissa(row)
1188        u = self._getcoordinfo()[0]
1189        if u == "":
1190            u = "channel"
1191        msg = "The current mask window unit is %s" % u
1192        i = self._check_ifs()
1193        if not i:
1194            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1195        if not silent:
1196            asaplog.push(msg)
1197        masklist=[]
1198        ist, ien = None, None
1199        ist, ien=self.get_mask_indices(mask)
1200        if ist is not None and ien is not None:
1201            for i in xrange(len(ist)):
1202                range=[data[ist[i]],data[ien[i]]]
1203                range.sort()
1204                masklist.append([range[0],range[1]])
1205        return masklist
1206
1207    def get_mask_indices(self, mask=None):
1208        """\
1209        Compute and Return lists of mask start indices and mask end indices.
1210
1211        Parameters:
1212
1213            mask:       channel mask, created with create_mask.
1214
1215        Returns:
1216
1217            List of mask start indices and that of mask end indices,
1218            i.e., [istart1,istart2,....], [iend1,iend2,....].
1219
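        Example (illustrative)::

            msk = scan.create_mask([400, 500])
            istart, iend = scan.get_mask_indices(msk)
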
1220        """
1221        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1222            raise TypeError("The mask should be list or tuple.")
1223        if len(mask) < 2:
1224            raise TypeError("The mask should have more than one element")
1225        istart=[]
1226        iend=[]
1227        if mask[0]: istart.append(0)
1228        for i in range(len(mask)-1):
1229            if not mask[i] and mask[i+1]:
1230                istart.append(i+1)
1231            elif mask[i] and not mask[i+1]:
1232                iend.append(i)
1233        if mask[len(mask)-1]: iend.append(len(mask)-1)
1234        if len(istart) != len(iend):
1235            raise RuntimeError("Numbers of mask start != mask end.")
1236        for i in range(len(istart)):
1237            if istart[i] > iend[i]:
1238                raise RuntimeError("Mask start index > mask end index")
1239                break
1240        return istart,iend
1241
1242#    def get_restfreqs(self):
1243#        """
1244#        Get the restfrequency(s) stored in this scantable.
1245#        The return value(s) are always of unit 'Hz'
1246#        Parameters:
1247#            none
1248#        Returns:
1249#            a list of doubles
1250#        """
1251#        return list(self._getrestfreqs())
1252
1253    def get_restfreqs(self, ids=None):
1254        """\
1255        Get the restfrequency(s) stored in this scantable.
1256        The return value(s) are always of unit 'Hz'
1257
1258        Parameters:
1259
1260            ids: (optional) a list of MOLECULE_IDs for which the
1261                 restfrequency(s) are to be retrieved
1262
1263        Returns:
1264
1265            dictionary containing ids and a list of doubles for each id
1266
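        Example (illustrative)::

            # rest frequencies for all MOLECULE_IDs as a dictionary
            rfreqs = scan.get_restfreqs()
            # rest frequency list for MOLECULE_ID 0 only
            rf0 = scan.get_restfreqs(0)
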
1267        """
1268        if ids is None:
1269            rfreqs={}
1270            idlist = self.getmolnos()
1271            for i in idlist:
1272                rfreqs[i]=list(self._getrestfreqs(i))
1273            return rfreqs
1274        else:
1275            if type(ids)==list or type(ids)==tuple:
1276                rfreqs={}
1277                for i in ids:
1278                    rfreqs[i]=list(self._getrestfreqs(i))
1279                return rfreqs
1280            else:
1281                return list(self._getrestfreqs(ids))
1282            #return list(self._getrestfreqs(ids))
1283
1284    def set_restfreqs(self, freqs=None, unit='Hz'):
1285        """\
1286        Set or replace the rest frequency(s) of the selected data.
1287        If the 'freqs' argument holds a scalar,
1288        then that rest frequency will be applied to all the selected
1289        data.  If the 'freqs' argument holds
1290        a vector, then it MUST be of equal or smaller length than
1291        the number of IFs (and the available restfrequencies will be
1292        replaced by this vector).  In this case, *all* data have
1293        the restfrequency set per IF according
1294        to the corresponding value you give in the 'freqs' vector.
1295        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1296        IF 1 gets restfreq 2e9.
1297
1298        You can also specify the frequencies via a linecatalog.
1299
1300        Parameters:
1301
1302            freqs:   list of rest frequency values or string identifiers
1303
1304            unit:    unit for rest frequency (default 'Hz')
1305
1306
1307        Example::
1308
1309            # set the given restfrequency for the all currently selected IFs
1310            scan.set_restfreqs(freqs=1.4e9)
1311            # set restfrequencies for the n IFs  (n > 1) in the order of the
1312            # list, i.e
1313            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1314            # len(list_of_restfreqs) == nIF
1315            # for nIF == 1 the following will set multiple restfrequency for
1316            # that IF
1317            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1318            # set multiple restfrequencies per IF. as a list of lists where
1319            # the outer list has nIF elements, the inner is arbitrary
1320            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1321
1322       *Note*:
1323
1324            To do more sophisticated rest frequency setting, e.g. on a
1325            source and IF basis, use scantable.set_selection() before using
1326            this function::
1327
1328                # provided your scantable is called scan
1329                selection = selector()
1330                selection.set_name("ORION*")
1331                selection.set_ifs([1])
1332                scan.set_selection(selection)
1333                scan.set_restfreqs(freqs=86.6e9)
1334
1335        """
1336        varlist = vars()
1337        from asap import linecatalog
1338        # simple  value
1339        if isinstance(freqs, int) or isinstance(freqs, float):
1340            self._setrestfreqs([freqs], [""], unit)
1341        # list of values
1342        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1343            # list values are scalars
1344            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1345                if len(freqs) == 1:
1346                    self._setrestfreqs(freqs, [""], unit)
1347                else:
1348                    # allow the 'old' mode of setting multiple IFs
1349                    sel = selector()
1350                    savesel = self._getselection()
1351                    iflist = self.getifnos()
1352                    if len(freqs)>len(iflist):
1353                        raise ValueError("number of elements in the list "
1354                                         "exceeds the current IF selections")
1355                    iflist = self.getifnos()
1356                    for i, fval in enumerate(freqs):
1357                        sel.set_ifs(iflist[i])
1358                        self._setselection(sel)
1359                        self._setrestfreqs([fval], [""], unit)
1360                    self._setselection(savesel)
1361
1362            # list values are dicts, {'value': ..., 'name': ...}
1363            elif isinstance(freqs[-1], dict):
1364                values = []
1365                names = []
1366                for d in freqs:
1367                    values.append(d["value"])
1368                    names.append(d["name"])
1369                self._setrestfreqs(values, names, unit)
1370            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1371                sel = selector()
1372                savesel = self._getselection()
1373                iflist = self.getifnos()
1374                if len(freqs)>len(iflist):
1375                    raise ValueError("number of elements in list of list exceeds"
1376                                     " the current IF selections")
1377                for i, fval in enumerate(freqs):
1378                    sel.set_ifs(iflist[i])
1379                    self._setselection(sel)
1380                    self._setrestfreqs(fval, [""], unit)
1381                self._setselection(savesel)
1382        # freqs are to be taken from a linecatalog
1383        elif isinstance(freqs, linecatalog):
1384            sel = selector()
1385            savesel = self._getselection()
            iflist = self.getifnos()   # IFs to assign the catalogue lines to
1386            for i in xrange(freqs.nrow()):
1387                sel.set_ifs(iflist[i])
1388                self._setselection(sel)
1389                self._setrestfreqs([freqs.get_frequency(i)],
1390                                   [freqs.get_name(i)], "MHz")
1391                # ensure that we are not iterating past nIF
1392                if i == self.nif()-1: break
1393            self._setselection(savesel)
1394        else:
1395            return
1396        self._add_history("set_restfreqs", varlist)
1397
1398    def shift_refpix(self, delta):
1399        """\
1400        Shift the reference pixel of the Spectra Coordinate by an
1401        integer amount.
1402
1403        Parameters:
1404
1405            delta:   the amount to shift by
1406
1407        *Note*:
1408
1409            Be careful using this with broadband data.
1410
1411        """
1412        Scantable.shift_refpix(self, delta)
1413
1414    @asaplog_post_dec
1415    def history(self, filename=None):
1416        """\
1417        Print the history. Optionally to a file.
1418
1419        Parameters:
1420
1421            filename:    The name of the file to save the history to.
1422
1423        """
1424        hist = list(self._gethistory())
1425        out = "-"*80
1426        for h in hist:
1427            if h.startswith("---"):
1428                out = "\n".join([out, h])
1429            else:
1430                items = h.split("##")
1431                date = items[0]
1432                func = items[1]
1433                items = items[2:]
1434                out += "\n"+date+"\n"
1435                out += "Function: %s\n  Parameters:" % (func)
1436                for i in items:
1437                    if i == '':
1438                        continue
1439                    s = i.split("=")
1440                    out += "\n   %s = %s" % (s[0], s[1])
1441                out = "\n".join([out, "-"*80])
1442        if filename is not None:
1443            if filename == "":
1444                filename = 'scantable_history.txt'
1445            import os
1446            filename = os.path.expandvars(os.path.expanduser(filename))
1447            if not os.path.isdir(filename):
1448                data = open(filename, 'w')
1449                data.write(out)
1450                data.close()
1451            else:
1452                msg = "Illegal file name '%s'." % (filename)
1453                raise IOError(msg)
1454        return page(out)
1455    #
1456    # Maths business
1457    #
1458    @asaplog_post_dec
1459    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1460        """\
1461        Return the (time) weighted average of a scan.
1462
1463        *Note*:
1464
1465            in channels only - align if necessary
1466
1467        Parameters:
1468
1469            mask:     an optional mask (only used for 'var' and 'tsys'
1470                      weighting)
1471
1472            scanav:   True averages each scan separately
1473                      False (default) averages all scans together,
1474
1475            weight:   Weighting scheme.
1476                      'none'     (mean, no weighting)
1477                      'var'      (1/var(spec) weighted)
1478                      'tsys'     (1/Tsys**2 weighted)
1479                      'tint'     (integration time weighted)
1480                      'tintsys'  (Tint/Tsys**2)
1481                      'median'   (median averaging)
1482                      The default is 'tint'
1483
1484            align:    align the spectra in velocity before averaging. It takes
1485                      the time of the first spectrum as reference time.
1486
1487        Example::
1488
1489            # time average the scantable without using a mask
1490            newscan = scan.average_time()
1491
1492        """
1493        varlist = vars()
1494        weight = weight or 'TINT'
1495        mask = mask or ()
1496        scanav = (scanav and 'SCAN') or 'NONE'
1497        scan = (self, )
1498
1499        if align:
1500            scan = (self.freq_align(insitu=False), )
1501        s = None
1502        if weight.upper() == 'MEDIAN':
1503            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1504                                                     scanav))
1505        else:
1506            s = scantable(self._math._average(scan, mask, weight.upper(),
1507                          scanav))
1508        s._add_history("average_time", varlist)
1509        return s
1510
1511    @asaplog_post_dec
1512    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1513        """\
1514        Return a scan where all spectra are converted to either
1515        Jansky or Kelvin depending upon the flux units of the scan table.
1516        By default the function tries to look the values up internally.
1517        If it can't find them (or if you want to over-ride), you must
1518        specify EITHER jyperk OR eta (and D which it will try to look up
1519        also if you don't set it). jyperk takes precedence if you set both.
1520
1521        Parameters:
1522
1523            jyperk:      the Jy / K conversion factor
1524
1525            eta:         the aperture efficiency
1526
1527            d:           the geometric diameter (metres)
1528
1529            insitu:      if False a new scantable is returned.
1530                         Otherwise, the scaling is done in-situ
1531                         The default is taken from .asaprc (False)
1532
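        Example (illustrative; the factor is a placeholder)::

            # convert using an explicit Jy/K factor, returning a new scantable
            converted = scan.convert_flux(jyperk=12.5, insitu=False)
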
1533        """
1534        if insitu is None: insitu = rcParams['insitu']
1535        self._math._setinsitu(insitu)
1536        varlist = vars()
1537        jyperk = jyperk or -1.0
1538        d = d or -1.0
1539        eta = eta or -1.0
1540        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1541        s._add_history("convert_flux", varlist)
1542        if insitu: self._assign(s)
1543        else: return s
1544
1545    @asaplog_post_dec
1546    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1547        """\
1548        Return a scan after applying a gain-elevation correction.
1549        The correction can be made via either a polynomial or a
1550        table-based interpolation (and extrapolation if necessary).
1551        You specify polynomial coefficients, an ascii table or neither.
1552        If you specify neither, then a polynomial correction will be made
1553        with built-in coefficients known for certain telescopes (an error
1554        will occur if the instrument is not known).
1555        The data and Tsys are *divided* by the scaling factors.
1556
1557        Parameters:
1558
1559            poly:        Polynomial coefficients (default None) to compute a
1560                         gain-elevation correction as a function of
1561                         elevation (in degrees).
1562
1563            filename:    The name of an ascii file holding correction factors.
1564                         The first row of the ascii file must give the column
1565                         names and these MUST include columns
1566                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1567                         by this) somewhere.
1568                         The second row must give the data type of the
1569                         column. Use 'R' for Real and 'I' for Integer.
1570                         An example file would be
1571                         (actual factors are arbitrary) :
1572
1573                         TIME ELEVATION FACTOR
1574                         R R R
1575                         0.1 0 0.8
1576                         0.2 20 0.85
1577                         0.3 40 0.9
1578                         0.4 60 0.85
1579                         0.5 80 0.8
1580                         0.6 90 0.75
1581
1582            method:      Interpolation method when correcting from a table.
1583                         Values are  "nearest", "linear" (default), "cubic"
1584                         and "spline"
1585
1586            insitu:      if False a new scantable is returned.
1587                         Otherwise, the scaling is done in-situ
1588                         The default is taken from .asaprc (False)
1589
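        Example (illustrative only; 'gainel.txt' is a hypothetical ascii file
        in the format described above, and the polynomial coefficients are
        arbitrary)::

            # correct using a table of factors with linear interpolation
            gscan = scan.gain_el(filename='gainel.txt', method='linear',
                                 insitu=False)

            # or supply polynomial coefficients in elevation
            gscan = scan.gain_el(poly=[0.5, 0.01, -0.0001], insitu=False)
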
1590        """
1591
1592        if insitu is None: insitu = rcParams['insitu']
1593        self._math._setinsitu(insitu)
1594        varlist = vars()
1595        poly = poly or ()
1596        from os.path import expandvars
1597        filename = expandvars(filename)
1598        s = scantable(self._math._gainel(self, poly, filename, method))
1599        s._add_history("gain_el", varlist)
1600        if insitu:
1601            self._assign(s)
1602        else:
1603            return s
1604
1605    @asaplog_post_dec
1606    def freq_align(self, reftime=None, method='cubic', insitu=None):
1607        """\
1608        Return a scan where all rows have been aligned in frequency/velocity.
1609        The alignment frequency frame (e.g. LSRK) is that set by function
1610        set_freqframe.
1611
1612        Parameters:
1613
1614            reftime:     reference time to align at. By default, the time of
1615                         the first row of data is used.
1616
1617            method:      Interpolation method for regridding the spectra.
1618                         Choose from "nearest", "linear", "cubic" (default)
1619                         and "spline"
1620
1621            insitu:      if False a new scantable is returned.
1622                         Otherwise, the scaling is done in-situ
1623                         The default is taken from .asaprc (False)
1624
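        Example (a minimal sketch, assuming LSRK is the desired frame)::

            # align all rows in the frame previously set with set_freqframe
            scan.set_freqframe('LSRK')
            ascan = scan.freq_align(insitu=False)
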
1625        """
1626        if insitu is None: insitu = rcParams["insitu"]
1627        self._math._setinsitu(insitu)
1628        varlist = vars()
1629        reftime = reftime or ""
1630        s = scantable(self._math._freq_align(self, reftime, method))
1631        s._add_history("freq_align", varlist)
1632        if insitu: self._assign(s)
1633        else: return s
1634
1635    @asaplog_post_dec
1636    def opacity(self, tau=None, insitu=None):
1637        """\
1638        Apply an opacity correction. The data
1639        and Tsys are multiplied by the correction factor.
1640
1641        Parameters:
1642
1643            tau:         (list of) opacity from which the correction factor is
1644                         exp(tau*ZD)
1645                         where ZD is the zenith-distance.
1646                         If a list is provided, it has to be of length nIF,
1647                         nIF*nPol or 1 and in order of IF/POL, e.g.
1648                         [opif0pol0, opif0pol1, opif1pol0 ...]
1649                         if tau is `None` the opacities are determined from a
1650                         model.
1651
1652            insitu:      if False a new scantable is returned.
1653                         Otherwise, the scaling is done in-situ
1654                         The default is taken from .asaprc (False)
1655
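        Example (the opacity values below are arbitrary)::

            # apply a single zenith opacity to all IFs/polarisations
            oscan = scan.opacity(tau=0.09, insitu=False)

            # or give one value per IF (here assuming two IFs)
            oscan = scan.opacity(tau=[0.09, 0.12], insitu=False)
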
1656        """
1657        if insitu is None: insitu = rcParams['insitu']
1658        self._math._setinsitu(insitu)
1659        varlist = vars()
1660        if not hasattr(tau, "__len__"):
1661            tau = [tau]
1662        s = scantable(self._math._opacity(self, tau))
1663        s._add_history("opacity", varlist)
1664        if insitu: self._assign(s)
1665        else: return s
1666
1667    @asaplog_post_dec
1668    def bin(self, width=5, insitu=None):
1669        """\
1670        Return a scan where all spectra have been binned up.
1671
1672        Parameters:
1673
1674            width:       The bin width (default=5) in pixels
1675
1676            insitu:      if False a new scantable is returned.
1677                         Otherwise, the scaling is done in-situ
1678                         The default is taken from .asaprc (False)
1679
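        Example (the bin width of 10 is an arbitrary choice)::

            # bin up the spectra by 10 channels, returning a new scantable
            bscan = scan.bin(width=10, insitu=False)
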
1680        """
1681        if insitu is None: insitu = rcParams['insitu']
1682        self._math._setinsitu(insitu)
1683        varlist = vars()
1684        s = scantable(self._math._bin(self, width))
1685        s._add_history("bin", varlist)
1686        if insitu:
1687            self._assign(s)
1688        else:
1689            return s
1690
1691    @asaplog_post_dec
1692    def resample(self, width=5, method='cubic', insitu=None):
1693        """\
1694        Return a scan where all spectra have been resampled onto a new channel width.
1695
1696        Parameters:
1697
1698            width:       The bin width (default=5) in pixels
1699
1700            method:      Interpolation method used for the resampling.
1701                         Values are "nearest", "linear", "cubic" (default)
1702                         and "spline"
1703
1704            insitu:      if False a new scantable is returned.
1705                         Otherwise, the scaling is done in-situ
1706                         The default is taken from .asaprc (False)
1707
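        Example (width and method values are arbitrary choices)::

            # resample onto channels 4 times wider, using spline interpolation
            rscan = scan.resample(width=4, method='spline', insitu=False)
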
1708        """
1709        if insitu is None: insitu = rcParams['insitu']
1710        self._math._setinsitu(insitu)
1711        varlist = vars()
1712        s = scantable(self._math._resample(self, method, width))
1713        s._add_history("resample", varlist)
1714        if insitu: self._assign(s)
1715        else: return s
1716
1717    @asaplog_post_dec
1718    def average_pol(self, mask=None, weight='none'):
1719        """\
1720        Average the Polarisations together.
1721
1722        Parameters:
1723
1724            mask:        An optional mask defining the region, where the
1725                         averaging will be applied. The output will have all
1726                         specified points masked.
1727
1728            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1729                         weighted), or 'tsys' (1/Tsys**2 weighted)
1730
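        Example::

            # average the polarisations together with Tsys weighting
            pscan = scan.average_pol(weight='tsys')
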
1731        """
1732        varlist = vars()
1733        mask = mask or ()
1734        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1735        s._add_history("average_pol", varlist)
1736        return s
1737
1738    @asaplog_post_dec
1739    def average_beam(self, mask=None, weight='none'):
1740        """\
1741        Average the Beams together.
1742
1743        Parameters:
1744            mask:        An optional mask defining the region, where the
1745                         averaging will be applied. The output will have all
1746                         specified points masked.
1747
1748            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1749                         weighted), or 'tsys' (1/Tsys**2 weighted)
1750
1751        """
1752        varlist = vars()
1753        mask = mask or ()
1754        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1755        s._add_history("average_beam", varlist)
1756        return s
1757
1758    def parallactify(self, pflag):
1759        """\
1760        Set a flag to indicate whether this data should be treated as having
1761        been 'parallactified' (total phase == 0.0)
1762
1763        Parameters:
1764
1765            pflag:  Bool indicating whether to turn this on (True) or
1766                    off (False)
1767
1768        """
1769        varlist = vars()
1770        self._parallactify(pflag)
1771        self._add_history("parallactify", varlist)
1772
1773    @asaplog_post_dec
1774    def convert_pol(self, poltype=None):
1775        """\
1776        Convert the data to a different polarisation type.
1777        Note that you will need cross-polarisation terms for most conversions.
1778
1779        Parameters:
1780
1781            poltype:    The new polarisation type. Valid types are:
1782                        "linear", "circular", "stokes" and "linpol"
1783
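        Example::

            # convert (e.g. linears with cross-pol terms present) to Stokes
            stokescan = scan.convert_pol('stokes')
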
1784        """
1785        varlist = vars()
1786        s = scantable(self._math._convertpol(self, poltype))
1787        s._add_history("convert_pol", varlist)
1788        return s
1789
1790    @asaplog_post_dec
1791    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1792        """\
1793        Smooth the spectrum by the specified kernel (conserving flux).
1794
1795        Parameters:
1796
1797            kernel:     The type of smoothing kernel. Select from
1798                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1799                        or 'poly'
1800
1801            width:      The width of the kernel in pixels. For hanning this is
1802                        ignored, otherwise it defaults to 5 pixels.
1803                        For 'gaussian' it is the Full Width Half
1804                        Maximum. For 'boxcar' it is the full width.
1805                        For 'rmedian' and 'poly' it is the half width.
1806
1807            order:      Optional parameter for 'poly' kernel (default is 2), to
1808                        specify the order of the polynomial. Ignored by all other
1809                        kernels.
1810
1811            plot:       plot the original and the smoothed spectra.
1812                        Each individual fit then has to be approved by
1813                        typing 'y' or 'n'.
1814
1815            insitu:     if False a new scantable is returned.
1816                        Otherwise, the scaling is done in-situ
1817                        The default is taken from .asaprc (False)
1818
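        Example (the kernel widths below are arbitrary choices)::

            # hanning smooth in place (width is ignored for 'hanning')
            scan.smooth(kernel='hanning', insitu=True)

            # gaussian smooth with a FWHM of 7 channels, returning a copy
            sscan = scan.smooth(kernel='gaussian', width=7.0, insitu=False)
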
1819        """
1820        if insitu is None: insitu = rcParams['insitu']
1821        self._math._setinsitu(insitu)
1822        varlist = vars()
1823
1824        if plot: orgscan = self.copy()
1825
1826        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1827        s._add_history("smooth", varlist)
1828
1829        if plot:
1830            if rcParams['plotter.gui']:
1831                from asap.asaplotgui import asaplotgui as asaplot
1832            else:
1833                from asap.asaplot import asaplot
1834            self._p=asaplot()
1835            self._p.set_panels()
1836            ylab=s._get_ordinate_label()
1837            #self._p.palette(0,["#777777","red"])
1838            for r in xrange(s.nrow()):
1839                xsm=s._getabcissa(r)
1840                ysm=s._getspectrum(r)
1841                xorg=orgscan._getabcissa(r)
1842                yorg=orgscan._getspectrum(r)
1843                self._p.clear()
1844                self._p.hold()
1845                self._p.set_axes('ylabel',ylab)
1846                self._p.set_axes('xlabel',s._getabcissalabel(r))
1847                self._p.set_axes('title',s._getsourcename(r))
1848                self._p.set_line(label='Original',color="#777777")
1849                self._p.plot(xorg,yorg)
1850                self._p.set_line(label='Smoothed',color="red")
1851                self._p.plot(xsm,ysm)
1852                ### Ugly part for legend
1853                for i in [0,1]:
1854                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1855                self._p.release()
1856                ### Ugly part for legend
1857                self._p.subplots[0]['lines']=[]
1858                res = raw_input("Accept smoothing ([y]/n): ")
1859                if res.upper() == 'N':
1860                    s._setspectrum(yorg, r)
1861            self._p.unmap()
1862            self._p = None
1863            del orgscan
1864
1865        if insitu: self._assign(s)
1866        else: return s
1867
1868    @asaplog_post_dec
1869    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1870        """\
1871        Return a scan which has been baselined (all rows) by a polynomial.
1872       
1873        Parameters:
1874
1875            mask:       an optional mask
1876
1877            order:      the order of the polynomial (default is 0)
1878
1879            plot:       plot the fit and the residual. Each individual
1880                        fit then has to be approved by typing 'y'
1881                        or 'n'
1882
1883            uselin:     use linear polynomial fit
1884
1885            insitu:     if False a new scantable is returned.
1886                        Otherwise, the scaling is done in-situ
1887                        The default is taken from .asaprc (False)
1888
1889            rows:       row numbers of spectra to be processed.
1890                        (default is None: for all rows)
1891       
1892        Example::

1893            # return a scan baselined by a third order polynomial,
1894            # not using a mask
1895            bscan = scan.old_poly_baseline(order=3)
1896
1897        """
1898        if insitu is None: insitu = rcParams['insitu']
1899        if not insitu:
1900            workscan = self.copy()
1901        else:
1902            workscan = self
1903        varlist = vars()
1904        if mask is None:
1905            mask = [True for i in xrange(self.nchan())]
1906
1907        try:
1908            f = fitter()
1909            if uselin:
1910                f.set_function(lpoly=order)
1911            else:
1912                f.set_function(poly=order)
1913
1914            if rows == None:
1915                rows = xrange(workscan.nrow())
1916            elif isinstance(rows, int):
1917                rows = [ rows ]
1918           
1919            if len(rows) > 0:
1920                self.blpars = []
1921                self.masklists = []
1922                self.actualmask = []
1923           
1924            for r in rows:
1925                f.x = workscan._getabcissa(r)
1926                f.y = workscan._getspectrum(r)
1927                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1928                f.data = None
1929                f.fit()
1930                if plot:
1931                    f.plot(residual=True)
1932                    x = raw_input("Accept fit ( [y]/n ): ")
1933                    if x.upper() == 'N':
1934                        self.blpars.append(None)
1935                        self.masklists.append(None)
1936                        self.actualmask.append(None)
1937                        continue
1938                workscan._setspectrum(f.fitter.getresidual(), r)
1939                self.blpars.append(f.get_parameters())
1940                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1941                self.actualmask.append(f.mask)
1942
1943            if plot:
1944                f._p.unmap()
1945                f._p = None
1946            workscan._add_history("poly_baseline", varlist)
1947            if insitu:
1948                self._assign(workscan)
1949            else:
1950                return workscan
1951        except RuntimeError:
1952            msg = "The fit failed, possibly because it didn't converge."
1953            raise RuntimeError(msg)
1954
1955    @asaplog_post_dec
1956    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1957        """\
1958        Return a scan which has been baselined (all rows) by a polynomial.

1959        Parameters:

1960            mask:       an optional mask

1961            order:      the order of the polynomial (default is 0)

1962            plot:       plot the fit and the residual. Each individual
1963                        fit then has to be approved by typing 'y'
1964                        or 'n'. Ignored if batch = True.

1965            batch:      if True a faster algorithm is used and logs
1966                        including the fit results are not output
1967                        (default is False)

1968            insitu:     if False a new scantable is returned.
1969                        Otherwise, the scaling is done in-situ
1970                        The default is taken from .asaprc (False)

1971            rows:       row numbers of spectra to be baselined.
1972                        (default is None: for all rows)

1973        Example::

1974            # return a scan baselined by a third order polynomial,
1975            # not using a mask
1976            bscan = scan.poly_baseline(order=3)
1977        """
1978       
1979        varlist = vars()
1980       
1981        if insitu is None: insitu = rcParams["insitu"]
1982        if insitu:
1983            workscan = self
1984        else:
1985            workscan = self.copy()
1986
1987        nchan = workscan.nchan()
1988       
1989        if mask is None:
1990            mask = [True for i in xrange(nchan)]
1991
1992        try:
1993            if rows == None:
1994                rows = xrange(workscan.nrow())
1995            elif isinstance(rows, int):
1996                rows = [ rows ]
1997           
1998            if len(rows) > 0:
1999                workscan.blpars = []
2000                workscan.masklists = []
2001                workscan.actualmask = []
2002
2003            if batch:
2004                workscan._poly_baseline_batch(mask, order)
2005            elif plot:
2006                f = fitter()
2007                f.set_function(lpoly=order)
2008                for r in rows:
2009                    f.x = workscan._getabcissa(r)
2010                    f.y = workscan._getspectrum(r)
2011                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2012                    f.data = None
2013                    f.fit()
2014                   
2015                    f.plot(residual=True)
2016                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2017                    if accept_fit.upper() == "N":
2018                        workscan.blpars.append(None)
2019                        workscan.masklists.append(None)
2020                        workscan.actualmask.append(None)
2021                        continue
2022                    workscan._setspectrum(f.fitter.getresidual(), r)
2023                    workscan.blpars.append(f.get_parameters())
2024                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2025                    workscan.actualmask.append(f.mask)
2026                   
2027                f._p.unmap()
2028                f._p = None
2029            else:
2030                for r in rows:
2031                    fitparams = workscan._poly_baseline(mask, order, r)
2032                    params = fitparams.getparameters()
2033                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2034                    errors = fitparams.geterrors()
2035                    fmask = mask_and(mask, workscan._getmask(r))
2036
2037                    workscan.blpars.append({"params":params,
2038                                            "fixed": fitparams.getfixedparameters(),
2039                                            "formatted":fmtd, "errors":errors})
2040                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2041                    workscan.actualmask.append(fmask)
2042                   
2043                    asaplog.push(fmtd)
2044           
2045            workscan._add_history("poly_baseline", varlist)
2046           
2047            if insitu:
2048                self._assign(workscan)
2049            else:
2050                return workscan
2051           
2052        except RuntimeError, e:
2053            msg = "The fit failed, possibly because it didn't converge."
2054            if rcParams["verbose"]:
2055                asaplog.push(str(e))
2056                asaplog.push(str(msg))
2057                return
2058            else:
2059                raise RuntimeError(str(e)+'\n'+msg)
2060
2061
2062    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2063                           threshold=3, chan_avg_limit=1, plot=False,
2064                           insitu=None, rows=None):
2065        """\
2066        Return a scan which has been baselined (all rows) by a polynomial.
2067        Spectral lines are detected first using linefinder and masked out
2068        to avoid them affecting the baseline solution.
2069
2070        Parameters:
2071
2072            mask:       an optional mask retrieved from the scantable
2073
2074            edge:       an optional number of channels to drop at the edge of
2075                        the spectrum. If only one value is
2076                        specified, the same number will be dropped from
2077                        both sides of the spectrum. Default is to keep
2078                        all channels. Nested tuples represent individual
2079                        edge selections for different IFs (whose numbers of
2080                        spectral channels may differ)
2081
2082            order:      the order of the polynomial (default is 0)
2083
2084            threshold:  the threshold used by line finder. It is better to
2085                        keep it large as only strong lines affect the
2086                        baseline solution.
2087
2088            chan_avg_limit:
2089                        a maximum number of consecutive spectral channels to
2090                        average during the search of weak and broad lines.
2091                        The default is no averaging (and no search for weak
2092                        lines). If such lines can affect the fitted baseline
2093                        (e.g. a high order polynomial is fitted), increase this
2094                        parameter (usually values up to 8 are reasonable). Most
2095                        users of this method should find the default value
2096                        sufficient.
2097
2098            plot:       plot the fit and the residual. Each individual
2099                        fit then has to be approved by typing 'y'
2100                        or 'n'
2101
2102            insitu:     if False a new scantable is returned.
2103                        Otherwise, the scaling is done in-situ
2104                        The default is taken from .asaprc (False)
2105            rows:       row numbers of spectra to be processed.
2106                        (default is None: for all rows)
2107
2108
2109        Example::
2110
2111            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2112
2113        """
2114        if insitu is None: insitu = rcParams['insitu']
2115        varlist = vars()
2116        from asap.asaplinefind import linefinder
2117        from asap import _is_sequence_or_number as _is_valid
2118
2119        # check whether edge is set up for each IF individually
2120        individualedge = False
2121        if len(edge) > 1:
2122            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2123                individualedge = True
2124
2125        if not _is_valid(edge, int) and not individualedge:
2126            raise ValueError, "Parameter 'edge' has to be an integer or a " \
2127                  "pair of integers specified as a tuple. Nested tuples are " \
2128                  "allowed to make individual selection for different IFs."
2129
2130        curedge = (0, 0)
2131        if individualedge:
2132            for edgepar in edge:
2133                if not _is_valid(edgepar, int):
2134                    raise ValueError, "Each element of the 'edge' tuple has " \
2135                                      "to be a pair of integers or an integer."
2136        else:
2137            curedge = edge
2138
2139        if not insitu:
2140            workscan = self.copy()
2141        else:
2142            workscan = self
2143
2144        # setup fitter
2145        f = fitter()
2146        f.set_function(lpoly=order)
2147
2148        # setup line finder
2149        fl = linefinder()
2150        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2151
2152        fl.set_scan(workscan)
2153
2154        if mask is None:
2155            mask = _n_bools(workscan.nchan(), True)
2156       
2157        if rows is None:
2158            rows = xrange(workscan.nrow())
2159        elif isinstance(rows, int):
2160            rows = [ rows ]
2161       
2162        # Save parameters of baseline fits & masklists as a class attribute.
2163        # NOTICE: It does not reflect changes in scantable!
2164        if len(rows) > 0:
2165            self.blpars=[]
2166            self.masklists=[]
2167            self.actualmask=[]
2168        asaplog.push("Processing:")
2169        for r in rows:
2170            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2171                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2172                 workscan.getpol(r), workscan.getcycle(r))
2173            asaplog.push(msg, False)
2174
2175            # figure out edge parameter
2176            if individualedge:
2177                if len(edge) <= workscan.getif(r):
2178                    raise RuntimeError, "Number of edge elements appears to " \
2179                                        "be less than the number of IFs"
2180                curedge = edge[workscan.getif(r)]
2181
2182            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2183
2184            # setup line finder
2185            fl.find_lines(r, actualmask, curedge)
2186           
2187            f.x = workscan._getabcissa(r)
2188            f.y = workscan._getspectrum(r)
2189            f.mask = fl.get_mask()
2190            f.data = None
2191            f.fit()
2192
2193            # Show mask list
2194            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2195            msg = "mask range: "+str(masklist)
2196            asaplog.push(msg, False)
2197
2198            if plot:
2199                f.plot(residual=True)
2200                x = raw_input("Accept fit ( [y]/n ): ")
2201                if x.upper() == 'N':
2202                    self.blpars.append(None)
2203                    self.masklists.append(None)
2204                    self.actualmask.append(None)
2205                    continue
2206
2207            workscan._setspectrum(f.fitter.getresidual(), r)
2208            self.blpars.append(f.get_parameters())
2209            self.masklists.append(masklist)
2210            self.actualmask.append(f.mask)
2211        if plot:
2212            f._p.unmap()
2213            f._p = None
2214        workscan._add_history("auto_poly_baseline", varlist)
2215        if insitu:
2216            self._assign(workscan)
2217        else:
2218            return workscan
2219
2220    @asaplog_post_dec
2221    def rotate_linpolphase(self, angle):
2222        """\
2223        Rotate the phase of the complex polarization O=Q+iU correlation.
2224        This is always done in situ in the raw data.  So if you call this
2225        function more than once then each call rotates the phase further.
2226
2227        Parameters:
2228
2229            angle:   The angle (degrees) to rotate (add) by.
2230
2231        Example::
2232
2233            scan.rotate_linpolphase(2.3)
2234
2235        """
2236        varlist = vars()
2237        self._math._rotate_linpolphase(self, angle)
2238        self._add_history("rotate_linpolphase", varlist)
2239        return
2240
2241    @asaplog_post_dec
2242    def rotate_xyphase(self, angle):
2243        """\
2244        Rotate the phase of the XY correlation.  This is always done in situ
2245        in the data.  So if you call this function more than once
2246        then each call rotates the phase further.
2247
2248        Parameters:
2249
2250            angle:   The angle (degrees) to rotate (add) by.
2251
2252        Example::
2253
2254            scan.rotate_xyphase(2.3)
2255
2256        """
2257        varlist = vars()
2258        self._math._rotate_xyphase(self, angle)
2259        self._add_history("rotate_xyphase", varlist)
2260        return
2261
2262    @asaplog_post_dec
2263    def swap_linears(self):
2264        """\
2265        Swap the linear polarisations XX and YY, or more generally the first
2266        two polarisations, as this also works for circulars.
2267        """
2268        varlist = vars()
2269        self._math._swap_linears(self)
2270        self._add_history("swap_linears", varlist)
2271        return
2272
2273    @asaplog_post_dec
2274    def invert_phase(self):
2275        """\
2276        Invert the phase of the complex polarisation
2277        """
2278        varlist = vars()
2279        self._math._invert_phase(self)
2280        self._add_history("invert_phase", varlist)
2281        return
2282
2283    @asaplog_post_dec
2284    def add(self, offset, insitu=None):
2285        """\
2286        Return a scan where all spectra have the offset added
2287
2288        Parameters:
2289
2290            offset:      the offset
2291
2292            insitu:      if False a new scantable is returned.
2293                         Otherwise, the scaling is done in-situ
2294                         The default is taken from .asaprc (False)
2295
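        Example (the offset value is arbitrary)::

            # add a constant offset of 0.5 (in the current flux unit)
            ascan = scan.add(0.5, insitu=False)
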
2296        """
2297        if insitu is None: insitu = rcParams['insitu']
2298        self._math._setinsitu(insitu)
2299        varlist = vars()
2300        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2301        s._add_history("add", varlist)
2302        if insitu:
2303            self._assign(s)
2304        else:
2305            return s
2306
2307    @asaplog_post_dec
2308    def scale(self, factor, tsys=True, insitu=None):
2309        """\
2310
2311        Return a scan where all spectra are scaled by the given 'factor'
2312
2313        Parameters:
2314
2315            factor:      the scaling factor (float or 1D float list)
2316
2317            insitu:      if False a new scantable is returned.
2318                         Otherwise, the scaling is done in-situ
2319                         The default is taken from .asaprc (False)
2320
2321            tsys:        if True (default) then apply the operation to Tsys
2322                         as well as the data
2323
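        Example (the factor is arbitrary)::

            # scale all spectra, and their Tsys values, by 1.3
            sscan = scan.scale(1.3, tsys=True, insitu=False)
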
2324        """
2325        if insitu is None: insitu = rcParams['insitu']
2326        self._math._setinsitu(insitu)
2327        varlist = vars()
2328        s = None
2329        import numpy
2330        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2331            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2332                from asapmath import _array2dOp
2333                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2334            else:
2335                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2336        else:
2337            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2338        s._add_history("scale", varlist)
2339        if insitu:
2340            self._assign(s)
2341        else:
2342            return s
2343
2344    def set_sourcetype(self, match, matchtype="pattern",
2345                       sourcetype="reference"):
2346        """\
2347        Set the type of the source to be a source or reference scan
2348        using the provided pattern.
2349
2350        Parameters:
2351
2352            match:          a Unix style pattern, regular expression or selector
2353
2354            matchtype:      'pattern' (default) UNIX style pattern or
2355                            'regex' regular expression
2356
2357            sourcetype:     the type of the source to use (source/reference)
2358
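        Example (the '*_R' naming convention is only an assumption about the
        data; adapt the pattern to your source names)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
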
2359        """
2360        varlist = vars()
2361        basesel = self.get_selection()
2362        stype = -1
2363        if sourcetype.lower().startswith("r"):
2364            stype = 1
2365        elif sourcetype.lower().startswith("s"):
2366            stype = 0
2367        else:
2368            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2369        if matchtype.lower().startswith("p"):
2370            matchtype = "pattern"
2371        elif matchtype.lower().startswith("r"):
2372            matchtype = "regex"
2373        else:
2374            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2375        sel = selector()
2376        if isinstance(match, selector):
2377            sel = match
2378        else:
2379            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2380        self.set_selection(basesel+sel)
2381        self._setsourcetype(stype)
2382        self.set_selection(basesel)
2383        self._add_history("set_sourcetype", varlist)
2384
2385    @asaplog_post_dec
2386    @preserve_selection
2387    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2388        """\
2389        This function allows you to build quotients automatically.
2390        It assumes the observation has the same number of
2391        "ons" and "offs".
2392
2393        Parameters:
2394
2395            preserve:       you can preserve (default) the continuum or
2396                            remove it.  The equations used are
2397
2398                            preserve: Output = Toff * (on/off) - Toff
2399
2400                            remove:   Output = Toff * (on/off) - Ton
2401
2402            mode:           the on/off detection mode
2403                            'paired' (default)
2404                            identifies 'off' scans by the
2405                            trailing '_R' (Mopra/Parkes) or
2406                            '_e'/'_w' (Tid) and matches
2407                            on/off pairs from the observing pattern
2408                            'time'
2409                            finds the closest off in time
2410
2411        .. todo:: verify argument is not implemented
2412
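        Example::

            # form quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
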
2413        """
2414        varlist = vars()
2415        modes = ["time", "paired"]
2416        if not mode in modes:
2417            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
2418            raise ValueError(msg)
2419        s = None
2420        if mode.lower() == "paired":
2421            sel = self.get_selection()
2422            sel.set_query("SRCTYPE==psoff")
2423            self.set_selection(sel)
2424            offs = self.copy()
2425            sel.set_query("SRCTYPE==pson")
2426            self.set_selection(sel)
2427            ons = self.copy()
2428            s = scantable(self._math._quotient(ons, offs, preserve))
2429        elif mode.lower() == "time":
2430            s = scantable(self._math._auto_quotient(self, mode, preserve))
2431        s._add_history("auto_quotient", varlist)
2432        return s
2433
2434    @asaplog_post_dec
2435    def mx_quotient(self, mask = None, weight='median', preserve=True):
2436        """\
2437        Form a quotient using "off" beams when observing in "MX" mode.
2438
2439        Parameters:
2440
2441            mask:           an optional mask to be used when weight == 'stddev'
2442
2443            weight:         How to average the off beams.  Default is 'median'.
2444
2445            preserve:       you can preserve (default) the continuum or
2446                            remove it.  The equations used are:
2447
2448                                preserve: Output = Toff * (on/off) - Toff
2449
2450                                remove:   Output = Toff * (on/off) - Ton
2451
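        Example::

            # form an MX-mode quotient with median-averaged off beams
            q = scan.mx_quotient(weight='median', preserve=True)
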
2452        """
2453        mask = mask or ()
2454        varlist = vars()
2455        on = scantable(self._math._mx_extract(self, 'on'))
2456        preoff = scantable(self._math._mx_extract(self, 'off'))
2457        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2458        from asapmath  import quotient
2459        q = quotient(on, off, preserve)
2460        q._add_history("mx_quotient", varlist)
2461        return q
2462
2463    @asaplog_post_dec
2464    def freq_switch(self, insitu=None):
2465        """\
2466        Apply frequency switching to the data.
2467
2468        Parameters:
2469
2470            insitu:      if False a new scantable is returned.
2471                         Otherwise, the switching is done in-situ
2472                         The default is taken from .asaprc (False)
2473
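        Example::

            # apply frequency switching, returning a new scantable
            fscan = scan.freq_switch(insitu=False)
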
2474        """
2475        if insitu is None: insitu = rcParams['insitu']
2476        self._math._setinsitu(insitu)
2477        varlist = vars()
2478        s = scantable(self._math._freqswitch(self))
2479        s._add_history("freq_switch", varlist)
2480        if insitu:
2481            self._assign(s)
2482        else:
2483            return s
2484
2485    @asaplog_post_dec
2486    def recalc_azel(self):
2487        """Recalculate the azimuth and elevation for each position."""
2488        varlist = vars()
2489        self._recalcazel()
2490        self._add_history("recalc_azel", varlist)
2491        return
2492
2493    @asaplog_post_dec
2494    def __add__(self, other):
2495        varlist = vars()
2496        s = None
2497        if isinstance(other, scantable):
2498            s = scantable(self._math._binaryop(self, other, "ADD"))
2499        elif isinstance(other, float):
2500            s = scantable(self._math._unaryop(self, other, "ADD", False))
2501        else:
2502            raise TypeError("Other input is not a scantable or float value")
2503        s._add_history("operator +", varlist)
2504        return s
2505
2506    @asaplog_post_dec
2507    def __sub__(self, other):
2508        """
2509        implicit on all axes and on Tsys
2510        """
2511        varlist = vars()
2512        s = None
2513        if isinstance(other, scantable):
2514            s = scantable(self._math._binaryop(self, other, "SUB"))
2515        elif isinstance(other, float):
2516            s = scantable(self._math._unaryop(self, other, "SUB", False))
2517        else:
2518            raise TypeError("Other input is not a scantable or float value")
2519        s._add_history("operator -", varlist)
2520        return s
2521
2522    @asaplog_post_dec
2523    def __mul__(self, other):
2524        """
2525        implicit on all axes and on Tsys
2526        """
2527        varlist = vars()
2528        s = None
2529        if isinstance(other, scantable):
2530            s = scantable(self._math._binaryop(self, other, "MUL"))
2531        elif isinstance(other, float):
2532            s = scantable(self._math._unaryop(self, other, "MUL", False))
2533        else:
2534            raise TypeError("Other input is not a scantable or float value")
2535        s._add_history("operator *", varlist)
2536        return s
2537
2538
2539    @asaplog_post_dec
2540    def __div__(self, other):
2541        """
2542        implicit on all axes and on Tsys
2543        """
2544        varlist = vars()
2545        s = None
2546        if isinstance(other, scantable):
2547            s = scantable(self._math._binaryop(self, other, "DIV"))
2548        elif isinstance(other, float):
2549            if other == 0.0:
2550                raise ZeroDivisionError("Dividing by zero is not recommended")
2551            s = scantable(self._math._unaryop(self, other, "DIV", False))
2552        else:
2553            raise TypeError("Other input is not a scantable or float value")
2554        s._add_history("operator /", varlist)
2555        return s
2556
2557    @asaplog_post_dec
2558    def get_fit(self, row=0):
2559        """\
2560        Print or return the stored fits for a row in the scantable
2561
2562        Parameters:
2563
2564            row:    the row which the fit has been applied to.
2565
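        Example (row 3 is arbitrary; assumes a fit has been stored for it)::

            # print the stored fit for row 3 and keep it as a dictionary
            fitdict = scan.get_fit(row=3)
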
2566        """
2567        if row >= self.nrow():
2568            return
2569        from asap.asapfit import asapfit
2570        fit = asapfit(self._getfit(row))
2571        asaplog.push( '%s' %(fit) )
2572        return fit.as_dict()
2573
2574    def flag_nans(self):
2575        """\
2576        Utility function to flag NaN values in the scantable.
2577        """
2578        import numpy
2579        basesel = self.get_selection()
2580        for i in range(self.nrow()):
2581            sel = self.get_row_selector(i)
2582            self.set_selection(basesel+sel)
2583            nans = numpy.isnan(self._getspectrum(0))
2584            if numpy.any(nans):
2585                bnans = [ bool(v) for v in nans]
2586                self.flag(bnans)
2587        self.set_selection(basesel)
2588
2589    def get_row_selector(self, rowno):
2590        return selector(beams=self.getbeam(rowno),
2591                        ifs=self.getif(rowno),
2592                        pols=self.getpol(rowno),
2593                        scans=self.getscan(rowno),
2594                        cycles=self.getcycle(rowno))
2595
2596    def _add_history(self, funcname, parameters):
2597        if not rcParams['scantable.history']:
2598            return
2599        # create date
2600        sep = "##"
2601        from datetime import datetime
2602        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2603        hist = dstr+sep
2604        hist += funcname+sep#cdate+sep
2605        if parameters.has_key('self'): del parameters['self']
2606        for k, v in parameters.iteritems():
2607            if type(v) is dict:
2608                for k2, v2 in v.iteritems():
2609                    hist += k2
2610                    hist += "="
2611                    if isinstance(v2, scantable):
2612                        hist += 'scantable'
2613                    elif k2 == 'mask':
2614                        if isinstance(v2, list) or isinstance(v2, tuple):
2615                            hist += str(self._zip_mask(v2))
2616                        else:
2617                            hist += str(v2)
2618                    else:
2619                        hist += str(v2)
2620            else:
2621                hist += k
2622                hist += "="
2623                if isinstance(v, scantable):
2624                    hist += 'scantable'
2625                elif k == 'mask':
2626                    if isinstance(v, list) or isinstance(v, tuple):
2627                        hist += str(self._zip_mask(v))
2628                    else:
2629                        hist += str(v)
2630                else:
2631                    hist += str(v)
2632            hist += sep
2633        hist = hist[:-2] # remove trailing '##'
2634        self._addhistory(hist)
2635
2636
2637    def _zip_mask(self, mask):
2638        mask = list(mask)
2639        i = 0
2640        segments = []
2641        while mask[i:].count(1):
2642            i += mask[i:].index(1)
2643            if mask[i:].count(0):
2644                j = i + mask[i:].index(0)
2645            else:
2646                j = len(mask)
2647            segments.append([i, j])
2648            i = j
2649        return segments
2650
2651    def _get_ordinate_label(self):
2652        fu = "("+self.get_fluxunit()+")"
2653        import re
2654        lbl = "Intensity"
2655        if re.match(".K.", fu):
2656            lbl = "Brightness Temperature "+ fu
2657        elif re.match(".Jy.", fu):
2658            lbl = "Flux density "+ fu
2659        return lbl
2660
2661    def _check_ifs(self):
2662        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2663        nchans = filter(lambda t: t > 0, nchans)
2664        return (sum(nchans)/len(nchans) == nchans[0])
2665
2666    @asaplog_post_dec
2667    #def _fill(self, names, unit, average, getpt, antenna):
2668    def _fill(self, names, unit, average, opts={}):
2669        first = True
2670        fullnames = []
2671        for name in names:
2672            name = os.path.expandvars(name)
2673            name = os.path.expanduser(name)
2674            if not os.path.exists(name):
2675                msg = "File '%s' does not exist" % (name)
2676                raise IOError(msg)
2677            fullnames.append(name)
2678        if average:
2679            asaplog.push('Auto averaging integrations')
2680        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2681        for name in fullnames:
2682            tbl = Scantable(stype)
2683            r = filler(tbl)
2684            rx = rcParams['scantable.reference']
2685            r.setreferenceexpr(rx)
2686            msg = "Importing %s..." % (name)
2687            asaplog.push(msg, False)
2688            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2689            r.open(name, opts)# antenna, -1, -1, getpt)
2690            r.fill()
2691            if average:
2692                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2693            if not first:
2694                tbl = self._math._merge([self, tbl])
2695            Scantable.__init__(self, tbl)
2696            r.close()
2697            del r, tbl
2698            first = False
2699            #flush log
2700        asaplog.post()
2701        if unit is not None:
2702            self.set_fluxunit(unit)
2703        if not is_casapy():
2704            self.set_freqframe(rcParams['scantable.freqframe'])
2705
2706    def __getitem__(self, key):
2707        if key < 0:
2708            key += self.nrow()
2709        if key >= self.nrow():
2710            raise IndexError("Row index out of range.")
2711        return self._getspectrum(key)
2712
2713    def __setitem__(self, key, value):
2714        if key < 0:
2715            key += self.nrow()
2716        if key >= self.nrow():
2717            raise IndexError("Row index out of range.")
2718        if not hasattr(value, "__len__") or \
2719                len(value) > self.nchan(self.getif(key)):
2720            raise ValueError("Spectrum length doesn't match.")
2721        return self._setspectrum(value, key)
2722
2723    def __len__(self):
2724        return self.nrow()
2725
2726    def __iter__(self):
2727        for i in range(len(self)):
2728            yield self[i]