source: branches/polybatch/python/scantable.py @ 1924

Last change on this file since 1924 was 1924, checked in by Malte Marquarding, 14 years ago

Ticket #206: use STFitEntry as return object instead of pointer wrangling.

1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
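# Usage sketch (illustrative): preserve_selection can decorate any function
# whose first argument is a scantable and which temporarily changes the
# selection; the caller's selection is restored afterwards, even on error.
#
#     @preserve_selection
#     def summarise_all(scan):
#         scan.set_selection(selector())   # temporarily clear the selection
#         scan.summary()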
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
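    Example (illustrative; the file name is a placeholder)::

        if is_scantable('mydata.asap'):
            scan = scantable('mydata.asap', average=False)
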
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which makes loading
110                          the MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
114
115            parallactify: Indicate that the data have been parallactified.
116                          The default is taken from the .asaprc file.
117
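        Example (an illustrative sketch; the file names and the antenna name
        are placeholders)::

            # read an rpfits file, averaging integrations within each scan
            scan = scantable('2008-03-12_0932.rpf', average=True)
            # read a MeasurementSet, selecting a single antenna by name
            scan = scantable('uid_X01.ms', average=False, antenna='DV01')
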
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the outputfile. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   whether to overwrite the file if it exists.
209                         The default (False) is to return with a warning
210                         without writing the output. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable with the specified scan number(s)
257        dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
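        Example (illustrative)::

            # drop scans 2 and 4, keep everything else
            cleaned = scan.drop_scan([2, 4])
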
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to
340                         Default - no file output
341
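        Example (illustrative)::

            scan.summary()                      # send to the logger/pager
            scan.summary('obs_summary.txt')     # also write to a text file
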
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename == "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for a given row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for a given row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for the given row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
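        Example (an illustrative sketch)::

            spec = scan.get_spectrum(0)
            spec = [2.0 * v for v in spec]    # scale the values
            scan.set_spectrum(spec, 0)
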
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])     # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    scaling is done in-situ. The default is taken from .asaprc
473                    (False)
474
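        Example (illustrative)::

            row10 = scan.get_row(10, insitu=False)   # scantable with one row
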
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example:
520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. chan=0
619
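        Example (illustrative)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit'], qy['value'], qy['unit']
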
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=m)
644
645        """
646        return self.stats(stat='stddev', mask=mask)
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
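        Example (illustrative)::

            tsys_all = scan.get_tsys()        # one value per row
            tsys_3 = scan.get_tsys(row=3)     # a single row
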
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
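        Example (illustrative)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure']
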
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row)
735
736
737    def get_time(self, row=-1, asdatetime=False):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object for each integration time stamp in the scantable.
741
742        Parameters:
743
744            row:          row no of integration. Default -1 return all rows
745
746            asdatetime:   return values as datetime objects rather than strings
747
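        Example (illustrative)::

            stamps = scan.get_time(asdatetime=True)   # datetime objects
            first = scan.get_time(row=0)              # time string for row 0
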
748        """
749        from time import strptime
750        from datetime import datetime
751        times = self._get_column(self._gettime, row)
752        if not asdatetime:
753            return times
754        format = "%Y/%m/%d/%H:%M:%S"
755        if isinstance(times, list):
756            return [datetime(*strptime(i, format)[:6]) for i in times]
757        else:
758            return datetime(*strptime(times, format)[:6])
759
760
761    def get_inttime(self, row=-1):
762        """\
763        Get a list of integration times for the observations.
764        Return a time in seconds for each integration in the scantable.
765
766        Parameters:
767
768            row:    row no of integration. Default -1 return all rows.
769
770        """
771        return self._get_column(self._getinttime, row)
772
773
774    def get_sourcename(self, row=-1):
775        """\
776        Get a list of source names for the observations.
777        Return a string for each integration in the scantable.
778        Parameters:
779
780            row:    row no of integration. Default -1 return all rows.
781
782        """
783        return self._get_column(self._getsourcename, row)
784
785    def get_elevation(self, row=-1):
786        """\
787        Get a list of elevations for the observations.
788        Return a float for each integration in the scantable.
789
790        Parameters:
791
792            row:    row no of integration. Default -1 return all rows.
793
794        """
795        return self._get_column(self._getelevation, row)
796
797    def get_azimuth(self, row=-1):
798        """\
799        Get a list of azimuths for the observations.
800        Return a float for each integration in the scantable.
801
802        Parameters:
803            row:    row no of integration. Default -1 return all rows.
804
805        """
806        return self._get_column(self._getazimuth, row)
807
808    def get_parangle(self, row=-1):
809        """\
810        Get a list of parallactic angles for the observations.
811        Return a float for each integration in the scantable.
812
813        Parameters:
814
815            row:    row no of integration. Default -1 return all rows.
816
817        """
818        return self._get_column(self._getparangle, row)
819
820    def get_direction(self, row=-1):
821        """
822        Get a list of Positions on the sky (direction) for the observations.
823        Return a string for each integration in the scantable.
824
825        Parameters:
826
827            row:    row no of integration. Default -1 return all rows
828
829        """
830        return self._get_column(self._getdirection, row)
831
832    def get_directionval(self, row=-1):
833        """\
834        Get a list of Positions on the sky (direction) for the observations.
835        Return a float for each integration in the scantable.
836
837        Parameters:
838
839            row:    row no of integration. Default -1 return all rows
840
841        """
842        return self._get_column(self._getdirectionvec, row)
843
844    @asaplog_post_dec
845    def set_unit(self, unit='channel'):
846        """\
847        Set the unit for all following operations on this scantable
848
849        Parameters:
850
851            unit:    optional unit, default is 'channel'. Use one of '*Hz',
852                     'km/s', 'channel' or equivalent ''
853
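        Example (illustrative)::

            scan.set_unit('km/s')    # velocity axis
            scan.set_unit('GHz')     # frequency axis
            scan.set_unit()          # back to channels
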
854        """
855        varlist = vars()
856        if unit in ['', 'pixel', 'channel']:
857            unit = ''
858        inf = list(self._getcoordinfo())
859        inf[0] = unit
860        self._setcoordinfo(inf)
861        self._add_history("set_unit", varlist)
862
863    @asaplog_post_dec
864    def set_instrument(self, instr):
865        """\
866        Set the instrument for subsequent processing.
867
868        Parameters:
869
870            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
871                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
872
873        """
874        self._setInstrument(instr)
875        self._add_history("set_instrument", vars())
876
877    @asaplog_post_dec
878    def set_feedtype(self, feedtype):
879        """\
880        Overwrite the feed type, which might not be set correctly.
881
882        Parameters:
883
884            feedtype:     'linear' or 'circular'
885
886        """
887        self._setfeedtype(feedtype)
888        self._add_history("set_feedtype", vars())
889
890    @asaplog_post_dec
891    def set_doppler(self, doppler='RADIO'):
892        """\
893        Set the doppler for all following operations on this scantable.
894
895        Parameters:
896
897            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
898
899        """
900        varlist = vars()
901        inf = list(self._getcoordinfo())
902        inf[2] = doppler
903        self._setcoordinfo(inf)
904        self._add_history("set_doppler", vars())
905
906    @asaplog_post_dec
907    def set_freqframe(self, frame=None):
908        """\
909        Set the frame type of the Spectral Axis.
910
911        Parameters:
912
913            frame:   an optional frame type, default 'LSRK'. Valid frames are:
914                     'TOPO', 'LSRD', 'LSRK', 'BARY',
915                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
916
917        Example::
918
919            scan.set_freqframe('BARY')
920
921        """
922        frame = frame or rcParams['scantable.freqframe']
923        varlist = vars()
924        # "REST" is not implemented in casacore
925        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
926        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
927        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
928                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
929
930        if frame in valid:
931            inf = list(self._getcoordinfo())
932            inf[1] = frame
933            self._setcoordinfo(inf)
934            self._add_history("set_freqframe", varlist)
935        else:
936            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
937            raise TypeError(msg)
938
939    @asaplog_post_dec
940    def set_dirframe(self, frame=""):
941        """\
942        Set the frame type of the Direction on the sky.
943
944        Parameters:
945
946            frame:   an optional frame type, default ''. Valid frames are:
947                     'J2000', 'B1950', 'GALACTIC'
948
949        Example:
950
951            scan.set_dirframe('GALACTIC')
952
953        """
954        varlist = vars()
955        Scantable.set_dirframe(self, frame)
956        self._add_history("set_dirframe", varlist)
957
958    def get_unit(self):
959        """\
960        Get the default unit set in this scantable
961
962        Returns:
963
964            A unit string
965
966        """
967        inf = self._getcoordinfo()
968        unit = inf[0]
969        if unit == '': unit = 'channel'
970        return unit
971
972    @asaplog_post_dec
973    def get_abcissa(self, rowno=0):
974        """\
975        Get the abcissa in the current coordinate setup for the currently
976        selected Beam/IF/Pol
977
978        Parameters:
979
980            rowno:    an optional row number in the scantable. Default is the
981                      first row, i.e. rowno=0
982
983        Returns:
984
985            The abcissa values and the format string (as a dictionary)
986
987        """
988        abc = self._getabcissa(rowno)
989        lbl = self._getabcissalabel(rowno)
990        return abc, lbl
991
992    @asaplog_post_dec
993    def flag(self, mask=None, unflag=False):
994        """\
995        Flag the selected data using an optional channel mask.
996
997        Parameters:
998
999            mask:   an optional channel mask, created with create_mask. Default
1000                    (no mask) is all channels.
1001
1002            unflag:    if True, unflag the data
1003
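        Example (an illustrative sketch)::

            msk = scan.create_mask([0, 100])    # channels to operate on
            scan.flag(mask=msk)                 # flag them
            scan.flag(mask=msk, unflag=True)    # ... and unflag them again
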
1004        """
1005        varlist = vars()
1006        mask = mask or []
1007        self._flag(mask, unflag)
1008        self._add_history("flag", varlist)
1009
1010    @asaplog_post_dec
1011    def flag_row(self, rows=[], unflag=False):
1012        """\
1013        Flag the selected data in row-based manner.
1014
1015        Parameters:
1016
1017            rows:   list of row numbers to be flagged. Default is no row
1018                    (must be explicitly specified to execute row-based flagging).
1019
1020            unflag: if True, unflag the data.
1021
1022        """
1023        varlist = vars()
1024        self._flag_row(rows, unflag)
1025        self._add_history("flag_row", varlist)
1026
1027    @asaplog_post_dec
1028    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1029        """\
1030        Flag the selected data outside a specified range (channel-based).
1031
1032        Parameters:
1033
1034            uthres:      upper threshold.
1035
1036            dthres:      lower threshold
1037
1038            clipoutside: True for flagging data outside the range [dthres:uthres].
1039                         False for flagging data inside the range.
1040
1041            unflag:      if True, unflag the data.
1042
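        Example (illustrative; the thresholds are placeholders)::

            # flag everything outside the range [-0.5, 3.0]
            scan.clip(uthres=3.0, dthres=-0.5, clipoutside=True)
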
1043        """
1044        varlist = vars()
1045        self._clip(uthres, dthres, clipoutside, unflag)
1046        self._add_history("clip", varlist)
1047
1048    @asaplog_post_dec
1049    def lag_flag(self, start, end, unit="MHz", insitu=None):
1050        """\
1051        Flag the data in 'lag' space by providing a frequency to remove.
1052        Flagged data in the scantable gets interpolated over the region.
1053        No taper is applied.
1054
1055        Parameters:
1056
1057            start:    the start frequency (really a period within the
1058                      bandwidth)  or period to remove
1059
1060            end:      the end frequency or period to remove
1061
1062            unit:     the frequency unit (default "MHz") or "" for
1063                      explicit lag channels
1064
1065        *Notes*:
1066
1067            It is recommended to flag edges of the band or strong
1068            signals beforehand.
1069
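        Example (illustrative; the period range is a placeholder)::

            # interpolate over a ripple corresponding to 0.5-0.7 MHz
            cleaned = scan.lag_flag(0.5, 0.7, unit='MHz', insitu=False)
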
1070        """
1071        if insitu is None: insitu = rcParams['insitu']
1072        self._math._setinsitu(insitu)
1073        varlist = vars()
1074        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1075        if not (unit == "" or base.has_key(unit)):
1076            raise ValueError("%s is not a valid unit." % unit)
1077        if unit == "":
1078            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1079        else:
1080            s = scantable(self._math._lag_flag(self, start*base[unit],
1081                                               end*base[unit], "frequency"))
1082        s._add_history("lag_flag", varlist)
1083        if insitu:
1084            self._assign(s)
1085        else:
1086            return s
1087
1088    @asaplog_post_dec
1089    def create_mask(self, *args, **kwargs):
1090        """\
1091        Compute and return a mask based on [min, max] windows.
1092        The specified windows are to be INCLUDED, when the mask is
1093        applied.
1094
1095        Parameters:
1096
1097            [min, max], [min2, max2], ...
1098                Pairs of start/end points (inclusive) specifying the regions
1099                to be masked
1100
1101            invert:     optional argument. If specified as True,
1102                        return an inverted mask, i.e. the regions
1103                        specified are EXCLUDED
1104
1105            row:        create the mask using the specified row for
1106                        unit conversions, default is row=0
1107                        only necessary if frequency varies over rows.
1108
1109        Examples::
1110
1111            scan.set_unit('channel')
1112            # a)
1113            msk = scan.create_mask([400, 500], [800, 900])
1114            # masks everything outside 400 and 500
1115            # and 800 and 900 in the unit 'channel'
1116
1117            # b)
1118            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1119            # masks the regions between 400 and 500
1120            # and 800 and 900 in the unit 'channel'
1121
1122            # c)
1123            #mask only channel 400
1124            msk =  scan.create_mask([400])
1125
1126        """
1127        row = kwargs.get("row", 0)
1128        data = self._getabcissa(row)
1129        u = self._getcoordinfo()[0]
1130        if u == "":
1131            u = "channel"
1132        msg = "The current mask window unit is %s" % u
1133        i = self._check_ifs()
1134        if not i:
1135            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1136        asaplog.push(msg)
1137        n = self.nchan()
1138        msk = _n_bools(n, False)
1139        # test if args is a 'list' or a 'normal *args - UGLY!!!
1140
1141        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1142             and args or args[0]
1143        for window in ws:
1144            if len(window) == 1:
1145                window = [window[0], window[0]]
1146            if len(window) == 0 or  len(window) > 2:
1147                raise ValueError("A window needs to be defined as [start(, end)]")
1148            if window[0] > window[1]:
1149                tmp = window[0]
1150                window[0] = window[1]
1151                window[1] = tmp
1152            for i in range(n):
1153                if data[i] >= window[0] and data[i] <= window[1]:
1154                    msk[i] = True
1155        if kwargs.has_key('invert'):
1156            if kwargs.get('invert'):
1157                msk = mask_not(msk)
1158        return msk
1159
1160    def get_masklist(self, mask=None, row=0, silent=False):
1161        """\
1162        Compute and return a list of mask windows, [min, max].
1163
1164        Parameters:
1165
1166            mask:       channel mask, created with create_mask.
1167
1168            row:        calculate the masklist using the specified row
1169                        for unit conversions, default is row=0
1170                        only necessary if frequency varies over rows.
1171
1172        Returns:
1173
1174            [min, max], [min2, max2], ...
1175                Pairs of start/end points (inclusive) specifying
1176                the masked regions
1177
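        Example (illustrative)::

            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)
            # e.g. [[400.0, 500.0], [800.0, 900.0]] for a channel-unit axis
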
1178        """
1179        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1180            raise TypeError("The mask should be list or tuple.")
1181        if len(mask) < 2:
1182            raise TypeError("The mask should contain at least 2 elements")
1183        if self.nchan() != len(mask):
1184            msg = "Number of channels in scantable != number of mask elements"
1185            raise TypeError(msg)
1186        data = self._getabcissa(row)
1187        u = self._getcoordinfo()[0]
1188        if u == "":
1189            u = "channel"
1190        msg = "The current mask window unit is %s" % u
1191        i = self._check_ifs()
1192        if not i:
1193            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1194        if not silent:
1195            asaplog.push(msg)
1196        masklist=[]
1197        ist, ien = None, None
1198        ist, ien=self.get_mask_indices(mask)
1199        if ist is not None and ien is not None:
1200            for i in xrange(len(ist)):
1201                range=[data[ist[i]],data[ien[i]]]
1202                range.sort()
1203                masklist.append([range[0],range[1]])
1204        return masklist
1205
1206    def get_mask_indices(self, mask=None):
1207        """\
1208        Compute and Return lists of mask start indices and mask end indices.
1209
1210        Parameters:
1211
1212            mask:       channel mask, created with create_mask.
1213
1214        Returns:
1215
1216            List of mask start indices and that of mask end indices,
1217            i.e., [istart1,istart2,....], [iend1,iend2,....].
1218
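        Example (illustrative)::

            msk = scan.create_mask([10, 20])
            istart, iend = scan.get_mask_indices(msk)
            # -> [10], [20] for a channel-unit axis
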
1219        """
1220        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1221            raise TypeError("The mask should be list or tuple.")
1222        if len(mask) < 2:
1223            raise TypeError("The mask should contain at least 2 elements")
1224        istart=[]
1225        iend=[]
1226        if mask[0]: istart.append(0)
1227        for i in range(len(mask)-1):
1228            if not mask[i] and mask[i+1]:
1229                istart.append(i+1)
1230            elif mask[i] and not mask[i+1]:
1231                iend.append(i)
1232        if mask[len(mask)-1]: iend.append(len(mask)-1)
1233        if len(istart) != len(iend):
1234            raise RuntimeError("Numbers of mask start != mask end.")
1235        for i in range(len(istart)):
1236            if istart[i] > iend[i]:
1237                raise RuntimeError("Mask start index > mask end index")
1239        return istart,iend
1240
1241#    def get_restfreqs(self):
1242#        """
1243#        Get the restfrequency(s) stored in this scantable.
1244#        The return value(s) are always of unit 'Hz'
1245#        Parameters:
1246#            none
1247#        Returns:
1248#            a list of doubles
1249#        """
1250#        return list(self._getrestfreqs())
1251
1252    def get_restfreqs(self, ids=None):
1253        """\
1254        Get the restfrequency(s) stored in this scantable.
1255        The return value(s) are always of unit 'Hz'
1256
1257        Parameters:
1258
1259            ids: (optional) a list of MOLECULE_ID for which the rest
1260                 frequency(s) are to be retrieved
1261
1262        Returns:
1263
1264            dictionary containing ids and a list of doubles for each id
1265
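        Example (illustrative)::

            allfreqs = scan.get_restfreqs()      # dict keyed by MOLECULE_ID
            freqs0 = scan.get_restfreqs(ids=0)   # list of values for one id
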
1266        """
1267        if ids is None:
1268            rfreqs={}
1269            idlist = self.getmolnos()
1270            for i in idlist:
1271                rfreqs[i]=list(self._getrestfreqs(i))
1272            return rfreqs
1273        else:
1274            if type(ids)==list or type(ids)==tuple:
1275                rfreqs={}
1276                for i in ids:
1277                    rfreqs[i]=list(self._getrestfreqs(i))
1278                return rfreqs
1279            else:
1280                return list(self._getrestfreqs(ids))
1281            #return list(self._getrestfreqs(ids))
1282
1283    def set_restfreqs(self, freqs=None, unit='Hz'):
1284        """\
1285        Set or replace the rest frequency/frequencies specified.
1286        If the 'freqs' argument holds a scalar,
1287        then that rest frequency will be applied to all the selected
1288        data.  If the 'freqs' argument holds
1289        a vector, then it MUST be of equal or smaller length than
1290        the number of IFs (and the available restfrequencies will be
1291        replaced by this vector).  In this case, *all* data have
1292        the restfrequency set per IF according
1293        to the corresponding value you give in the 'freqs' vector.
1294        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1295        IF 1 gets restfreq 2e9.
1296
1297        You can also specify the frequencies via a linecatalog.
1298
1299        Parameters:
1300
1301            freqs:   list of rest frequency values or string identifiers
1302
1303            unit:    unit for rest frequency (default 'Hz')
1304
1305
1306        Example::
1307
1308            # set the given restfrequency for the all currently selected IFs
1309            scan.set_restfreqs(freqs=1.4e9)
1310            # set restfrequencies for the n IFs  (n > 1) in the order of the
1311            # list, i.e
1312            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1313            # len(list_of_restfreqs) == nIF
1314            # for nIF == 1 the following will set multiple restfrequency for
1315            # that IF
1316            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1317            # set multiple restfrequencies per IF. as a list of lists where
1318            # the outer list has nIF elements, the inner is arbitrary
1319            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1320
1321       *Note*:
1322
1323            To do more sophisticated rest frequency setting, e.g. on a
1324            source and IF basis, use scantable.set_selection() before using
1325            this function::
1326
1327                # provided your scantable is called scan
1328                selection = selector()
1329                selection.set_name("ORION*")
1330                selection.set_ifs([1])
1331                scan.set_selection(selection)
1332                scan.set_restfreqs(freqs=86.6e9)
1333
1334        """
1335        varlist = vars()
1336        from asap import linecatalog
1337        # simple  value
1338        if isinstance(freqs, int) or isinstance(freqs, float):
1339            self._setrestfreqs([freqs], [""], unit)
1340        # list of values
1341        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1342            # list values are scalars
1343            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1344                if len(freqs) == 1:
1345                    self._setrestfreqs(freqs, [""], unit)
1346                else:
1347                    # allow the 'old' mode of setting multiple IFs
1348                    sel = selector()
1349                    savesel = self._getselection()
1350                    iflist = self.getifnos()
1351                    if len(freqs)>len(iflist):
1352                        raise ValueError("number of elements in list of list "
1353                                         "exceeds the current IF selections")
1354                    iflist = self.getifnos()
1355                    for i, fval in enumerate(freqs):
1356                        sel.set_ifs(iflist[i])
1357                        self._setselection(sel)
1358                        self._setrestfreqs([fval], [""], unit)
1359                    self._setselection(savesel)
1360
1361            # list values are dicts, {'value': ..., 'name': ...}
1362            elif isinstance(freqs[-1], dict):
1363                values = []
1364                names = []
1365                for d in freqs:
1366                    values.append(d["value"])
1367                    names.append(d["name"])
1368                self._setrestfreqs(values, names, unit)
1369            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1370                sel = selector()
1371                savesel = self._getselection()
1372                iflist = self.getifnos()
1373                if len(freqs)>len(iflist):
1374                    raise ValueError("number of elements in list of list exceeds"
1375                                     " the current IF selections")
1376                for i, fval in enumerate(freqs):
1377                    sel.set_ifs(iflist[i])
1378                    self._setselection(sel)
1379                    self._setrestfreqs(fval, [""], unit)
1380                self._setselection(savesel)
1381        # freqs are to be taken from a linecatalog
1382        elif isinstance(freqs, linecatalog):
1383            sel = selector()
1384            savesel = self._getselection()
            iflist = self.getifnos()   # IFs to map the catalog rows onto
1385            for i in xrange(freqs.nrow()):
1386                sel.set_ifs(iflist[i])
1387                self._setselection(sel)
1388                self._setrestfreqs([freqs.get_frequency(i)],
1389                                   [freqs.get_name(i)], "MHz")
1390                # ensure that we are not iterating past nIF
1391                if i == self.nif()-1: break
1392            self._setselection(savesel)
1393        else:
1394            return
1395        self._add_history("set_restfreqs", varlist)
1396
1397    def shift_refpix(self, delta):
1398        """\
1399        Shift the reference pixel of the Spectral Coordinate by an
1400        integer amount.
1401
1402        Parameters:
1403
1404            delta:   the amount to shift by
1405
1406        *Note*:
1407
1408            Be careful using this with broadband data.
1409
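        Example (illustrative)::

            scan.shift_refpix(10)    # shift the reference pixel by 10 channels
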
1410        """
1411        Scantable.shift_refpix(self, delta)
1412
1413    @asaplog_post_dec
1414    def history(self, filename=None):
1415        """\
1416        Print the history. Optionally to a file.
1417
1418        Parameters:
1419
1420            filename:    The name  of the file to save the history to.
1421
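        Example (illustrative)::

            scan.history()                      # print to screen/logger
            scan.history('reduction_log.txt')   # write to a file
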
1422        """
1423        hist = list(self._gethistory())
1424        out = "-"*80
1425        for h in hist:
1426            if h.startswith("---"):
1427                out = "\n".join([out, h])
1428            else:
1429                items = h.split("##")
1430                date = items[0]
1431                func = items[1]
1432                items = items[2:]
1433                out += "\n"+date+"\n"
1434                out += "Function: %s\n  Parameters:" % (func)
1435                for i in items:
1436                    s = i.split("=")
1437                    out += "\n   %s = %s" % (s[0], s[1])
1438                out = "\n".join([out, "-"*80])
1439        if filename is not None:
1440            if filename == "":
1441                filename = 'scantable_history.txt'
1442            import os
1443            filename = os.path.expandvars(os.path.expanduser(filename))
1444            if not os.path.isdir(filename):
1445                data = open(filename, 'w')
1446                data.write(out)
1447                data.close()
1448            else:
1449                msg = "Illegal file name '%s'." % (filename)
1450                raise IOError(msg)
1451        return page(out)
1452    #
1453    # Maths business
1454    #
1455    @asaplog_post_dec
1456    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1457        """\
1458        Return the (time) weighted average of a scan.
1459
1460        *Note*:
1461
1462            in channels only - align if necessary
1463
1464        Parameters:
1465
1466            mask:     an optional mask (only used for 'var' and 'tsys'
1467                      weighting)
1468
1469            scanav:   True averages each scan separately
1470                      False (default) averages all scans together,
1471
1472            weight:   Weighting scheme.
1473                      'none'     (mean no weight)
1474                      'var'      (1/var(spec) weighted)
1475                      'tsys'     (1/Tsys**2 weighted)
1476                      'tint'     (integration time weighted)
1477                      'tintsys'  (Tint/Tsys**2)
1478                      'median'   ( median averaging)
1479                      The default is 'tint'
1480
1481            align:    align the spectra in velocity before averaging. It takes
1482                      the time of the first spectrum as reference time.
1483
1484        Example::
1485
1486            # time average the scantable without using a mask
1487            newscan = scan.average_time()
1488
1489        """
1490        varlist = vars()
1491        weight = weight or 'TINT'
1492        mask = mask or ()
1493        scanav = (scanav and 'SCAN') or 'NONE'
1494        scan = (self, )
1495
1496        if align:
1497            scan = (self.freq_align(insitu=False), )
1498        s = None
1499        if weight.upper() == 'MEDIAN':
1500            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1501                                                     scanav))
1502        else:
1503            s = scantable(self._math._average(scan, mask, weight.upper(),
1504                          scanav))
1505        s._add_history("average_time", varlist)
1506        return s
1507
1508    @asaplog_post_dec
1509    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1510        """\
1511        Return a scan where all spectra are converted to either
1512        Jansky or Kelvin depending upon the flux units of the scan table.
1513        By default the function tries to look the values up internally.
1514        If it can't find them (or if you want to over-ride), you must
1515        specify EITHER jyperk OR eta (and D which it will try to look up
1516        also if you don't set it). jyperk takes precedence if you set both.
1517
1518        Parameters:
1519
1520            jyperk:      the Jy / K conversion factor
1521
1522            eta:         the aperture efficiency
1523
1524            d:           the geometric diameter (metres)
1525
1526            insitu:      if False a new scantable is returned.
1527                         Otherwise, the scaling is done in-situ
1528                         The default is taken from .asaprc (False)
1529
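        Example (illustrative; the Jy/K factor is a placeholder)::

            jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
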
1530        """
1531        if insitu is None: insitu = rcParams['insitu']
1532        self._math._setinsitu(insitu)
1533        varlist = vars()
1534        jyperk = jyperk or -1.0
1535        d = d or -1.0
1536        eta = eta or -1.0
1537        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1538        s._add_history("convert_flux", varlist)
1539        if insitu: self._assign(s)
1540        else: return s
1541
1542    @asaplog_post_dec
1543    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1544        """\
1545        Return a scan after applying a gain-elevation correction.
1546        The correction can be made via either a polynomial or a
1547        table-based interpolation (and extrapolation if necessary).
1548        You specify polynomial coefficients, an ascii table or neither.
1549        If you specify neither, then a polynomial correction will be made
1550        with built in coefficients known for certain telescopes (an error
1551        will occur if the instrument is not known).
1552        The data and Tsys are *divided* by the scaling factors.
1553
1554        Parameters:
1555
1556            poly:        Polynomial coefficients (default None) to compute a
1557                         gain-elevation correction as a function of
1558                         elevation (in degrees).
1559
1560            filename:    The name of an ascii file holding correction factors.
1561                         The first row of the ascii file must give the column
1562                         names and these MUST include columns
1563                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1564                         by this) somewhere.
1565                         The second row must give the data type of the
1566                         column. Use 'R' for Real and 'I' for Integer.
1567                         An example file would be
1568                         (actual factors are arbitrary) :
1569
1570                         TIME ELEVATION FACTOR
1571                         R R R
1572                         0.1 0 0.8
1573                         0.2 20 0.85
1574                         0.3 40 0.9
1575                         0.4 60 0.85
1576                         0.5 80 0.8
1577                         0.6 90 0.75
1578
1579            method:      Interpolation method when correcting from a table.
1580                         Values are  "nearest", "linear" (default), "cubic"
1581                         and "spline"
1582
1583            insitu:      if False a new scantable is returned.
1584                         Otherwise, the scaling is done in-situ
1585                         The default is taken from .asaprc (False)
1586
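        Example (a sketch; 'gainel.txt' is a hypothetical file in the ascii
        format described above)::

            # apply a table-based correction with spline interpolation
            corrected = scan.gain_el(filename='gainel.txt', method='spline',
                                     insitu=False)
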
1587        """
1588
1589        if insitu is None: insitu = rcParams['insitu']
1590        self._math._setinsitu(insitu)
1591        varlist = vars()
1592        poly = poly or ()
1593        from os.path import expandvars
1594        filename = expandvars(filename)
1595        s = scantable(self._math._gainel(self, poly, filename, method))
1596        s._add_history("gain_el", varlist)
1597        if insitu:
1598            self._assign(s)
1599        else:
1600            return s
1601
1602    @asaplog_post_dec
1603    def freq_align(self, reftime=None, method='cubic', insitu=None):
1604        """\
1605        Return a scan where all rows have been aligned in frequency/velocity.
1606        The alignment frequency frame (e.g. LSRK) is that set by function
1607        set_freqframe.
1608
1609        Parameters:
1610
1611            reftime:     reference time to align at. By default, the time of
1612                         the first row of data is used.
1613
1614            method:      Interpolation method for regridding the spectra.
1615                         Choose from "nearest", "linear", "cubic" (default)
1616                         and "spline"
1617
1618            insitu:      if False a new scantable is returned.
1619                         Otherwise, the scaling is done in-situ
1620                         The default is taken from .asaprc (False)
1621
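        Example (a minimal sketch)::

            # align all rows to the time of the first row, returning a copy
            aligned = scan.freq_align(insitu=False)
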
1622        """
1623        if insitu is None: insitu = rcParams["insitu"]
1624        self._math._setinsitu(insitu)
1625        varlist = vars()
1626        reftime = reftime or ""
1627        s = scantable(self._math._freq_align(self, reftime, method))
1628        s._add_history("freq_align", varlist)
1629        if insitu: self._assign(s)
1630        else: return s
1631
1632    @asaplog_post_dec
1633    def opacity(self, tau=None, insitu=None):
1634        """\
1635        Apply an opacity correction. The data
1636        and Tsys are multiplied by the correction factor.
1637
1638        Parameters:
1639
1640            tau:         (list of) opacity from which the correction factor is
1641                         exp(tau*ZD)
1642                         where ZD is the zenith-distance.
1643                         If a list is provided, it has to be of length nIF,
1644                         nIF*nPol or 1 and in order of IF/POL, e.g.
1645                         [opif0pol0, opif0pol1, opif1pol0 ...]
1646                         if tau is `None` the opacities are determined from a
1647                         model.
1648
1649            insitu:      if False a new scantable is returned.
1650                         Otherwise, the scaling is done in-situ
1651                         The default is taken from .asaprc (False)
1652
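        Example (a sketch; the opacity value is illustrative only)::

            # correct using a single zenith opacity for all IFs/polarisations
            corrected = scan.opacity(tau=0.09, insitu=False)
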
1653        """
1654        if insitu is None: insitu = rcParams['insitu']
1655        self._math._setinsitu(insitu)
1656        varlist = vars()
1657        if not hasattr(tau, "__len__"):
1658            tau = [tau]
1659        s = scantable(self._math._opacity(self, tau))
1660        s._add_history("opacity", varlist)
1661        if insitu: self._assign(s)
1662        else: return s
1663
1664    @asaplog_post_dec
1665    def bin(self, width=5, insitu=None):
1666        """\
1667        Return a scan where all spectra have been binned up.
1668
1669        Parameters:
1670
1671            width:       The bin width (default=5) in pixels
1672
1673            insitu:      if False a new scantable is returned.
1674                         Otherwise, the scaling is done in-situ
1675                         The default is taken from .asaprc (False)
1676
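        Example (a minimal sketch)::

            # bin up by 10 channels, returning a new scantable
            binned = scan.bin(width=10, insitu=False)
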
1677        """
1678        if insitu is None: insitu = rcParams['insitu']
1679        self._math._setinsitu(insitu)
1680        varlist = vars()
1681        s = scantable(self._math._bin(self, width))
1682        s._add_history("bin", varlist)
1683        if insitu:
1684            self._assign(s)
1685        else:
1686            return s
1687
1688    @asaplog_post_dec
1689    def resample(self, width=5, method='cubic', insitu=None):
1690        """\
1691        Return a scan where all spectra have been resampled onto a new channel grid.
1692
1693        Parameters:
1694
1695            width:       The bin width (default=5) in pixels
1696
1697            method:      Interpolation method used for the resampling.
1698                         Values are  "nearest", "linear", "cubic" (default)
1699                         and "spline"
1700
1701            insitu:      if False a new scantable is returned.
1702                         Otherwise, the scaling is done in-situ
1703                         The default is taken from .asaprc (False)
1704
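        Example (a minimal sketch)::

            # resample onto a 4-pixel grid using spline interpolation
            resampled = scan.resample(width=4, method='spline', insitu=False)
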
1705        """
1706        if insitu is None: insitu = rcParams['insitu']
1707        self._math._setinsitu(insitu)
1708        varlist = vars()
1709        s = scantable(self._math._resample(self, method, width))
1710        s._add_history("resample", varlist)
1711        if insitu: self._assign(s)
1712        else: return s
1713
1714    @asaplog_post_dec
1715    def average_pol(self, mask=None, weight='none'):
1716        """\
1717        Average the Polarisations together.
1718
1719        Parameters:
1720
1721            mask:        An optional mask defining the region, where the
1722                         averaging will be applied. The output will have all
1723                         specified points masked.
1724
1725            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1726                         weighted), or 'tsys' (1/Tsys**2 weighted)
1727
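        Example (a minimal sketch)::

            # average the polarisations using 1/var(spec) weighting
            polavg = scan.average_pol(weight='var')
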
1728        """
1729        varlist = vars()
1730        mask = mask or ()
1731        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1732        s._add_history("average_pol", varlist)
1733        return s
1734
1735    @asaplog_post_dec
1736    def average_beam(self, mask=None, weight='none'):
1737        """\
1738        Average the Beams together.
1739
1740        Parameters:
1741            mask:        An optional mask defining the region, where the
1742                         averaging will be applied. The output will have all
1743                         specified points masked.
1744
1745            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1746                         weighted), or 'tsys' (1/Tsys**2 weighted)
1747
1748        """
1749        varlist = vars()
1750        mask = mask or ()
1751        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1752        s._add_history("average_beam", varlist)
1753        return s
1754
1755    def parallactify(self, pflag):
1756        """\
1757        Set a flag to indicate whether this data should be treated as having
1758        been 'parallactified' (total phase == 0.0)
1759
1760        Parameters:
1761
1762            pflag:  Bool indicating whether to turn this on (True) or
1763                    off (False)
1764
1765        """
1766        varlist = vars()
1767        self._parallactify(pflag)
1768        self._add_history("parallactify", varlist)
1769
1770    @asaplog_post_dec
1771    def convert_pol(self, poltype=None):
1772        """\
1773        Convert the data to a different polarisation type.
1774        Note that you will need cross-polarisation terms for most conversions.
1775
1776        Parameters:
1777
1778            poltype:    The new polarisation type. Valid types are:
1779                        "linear", "circular", "stokes" and "linpol"
1780
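        Example (a minimal sketch; requires cross-polarisation data)::

            # convert linear polarisation data to Stokes parameters
            stokes = scan.convert_pol('stokes')
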
1781        """
1782        varlist = vars()
1783        s = scantable(self._math._convertpol(self, poltype))
1784        s._add_history("convert_pol", varlist)
1785        return s
1786
1787    @asaplog_post_dec
1788    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1789        """\
1790        Smooth the spectrum by the specified kernel (conserving flux).
1791
1792        Parameters:
1793
1794            kernel:     The type of smoothing kernel. Select from
1795                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1796                        or 'poly'
1797
1798            width:      The width of the kernel in pixels. For hanning this is
1799                        ignored, otherwise it defaults to 5 pixels.
1800                        For 'gaussian' it is the Full Width Half
1801                        Maximum. For 'boxcar' it is the full width.
1802                        For 'rmedian' and 'poly' it is the half width.
1803
1804            order:      Optional parameter for 'poly' kernel (default is 2), to
1805                        specify the order of the polynomial. Ignored by all other
1806                        kernels.
1807
1808            plot:       plot the original and the smoothed spectra.
1809                        Each individual fit has to be approved by
1810                        typing 'y' or 'n'.
1811
1812            insitu:     if False a new scantable is returned.
1813                        Otherwise, the scaling is done in-situ
1814                        The default is taken from .asaprc (False)
1815
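        Example (a sketch; the kernel width is illustrative only)::

            # boxcar smoothing over 9 channels, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=9, insitu=False)
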
1816        """
1817        if insitu is None: insitu = rcParams['insitu']
1818        self._math._setinsitu(insitu)
1819        varlist = vars()
1820
1821        if plot: orgscan = self.copy()
1822
1823        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1824        s._add_history("smooth", varlist)
1825
1826        if plot:
1827            if rcParams['plotter.gui']:
1828                from asap.asaplotgui import asaplotgui as asaplot
1829            else:
1830                from asap.asaplot import asaplot
1831            self._p=asaplot()
1832            self._p.set_panels()
1833            ylab=s._get_ordinate_label()
1834            #self._p.palette(0,["#777777","red"])
1835            for r in xrange(s.nrow()):
1836                xsm=s._getabcissa(r)
1837                ysm=s._getspectrum(r)
1838                xorg=orgscan._getabcissa(r)
1839                yorg=orgscan._getspectrum(r)
1840                self._p.clear()
1841                self._p.hold()
1842                self._p.set_axes('ylabel',ylab)
1843                self._p.set_axes('xlabel',s._getabcissalabel(r))
1844                self._p.set_axes('title',s._getsourcename(r))
1845                self._p.set_line(label='Original',color="#777777")
1846                self._p.plot(xorg,yorg)
1847                self._p.set_line(label='Smoothed',color="red")
1848                self._p.plot(xsm,ysm)
1849                ### Ugly part for legend
1850                for i in [0,1]:
1851                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1852                self._p.release()
1853                ### Ugly part for legend
1854                self._p.subplots[0]['lines']=[]
1855                res = raw_input("Accept smoothing ([y]/n): ")
1856                if res.upper() == 'N':
1857                    s._setspectrum(yorg, r)
1858            self._p.unmap()
1859            self._p = None
1860            del orgscan
1861
1862        if insitu: self._assign(s)
1863        else: return s
1864
1865    @asaplog_post_dec
1866    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1867        """\
1868        Return a scan which has been baselined (all rows) by a polynomial.
1869       
1870        Parameters:
1871
1872            mask:       an optional mask
1873
1874            order:      the order of the polynomial (default is 0)
1875
1876            plot:       plot the fit and the residual. Each
1877                        individual fit has to be approved by typing 'y'
1878                        or 'n'
1879
1880            uselin:     use linear polynomial fit
1881
1882            insitu:     if False a new scantable is returned.
1883                        Otherwise, the scaling is done in-situ
1884                        The default is taken from .asaprc (False)
1885
1886            rows:       row numbers of spectra to be processed.
1887                        (default is None: for all rows)
1888       
1889        Example::
1890            # return a scan baselined by a third order polynomial,
1891            # not using a mask
1892            bscan = scan.poly_baseline(order=3)
1893
1894        """
1895        if insitu is None: insitu = rcParams['insitu']
1896        if not insitu:
1897            workscan = self.copy()
1898        else:
1899            workscan = self
1900        varlist = vars()
1901        if mask is None:
1902            mask = [True for i in xrange(self.nchan())]
1903
1904        try:
1905            f = fitter()
1906            if uselin:
1907                f.set_function(lpoly=order)
1908            else:
1909                f.set_function(poly=order)
1910
1911            if rows is None:
1912                rows = xrange(workscan.nrow())
1913            elif isinstance(rows, int):
1914                rows = [ rows ]
1915           
1916            if len(rows) > 0:
1917                self.blpars = []
1918                self.masklists = []
1919                self.actualmask = []
1920           
1921            for r in rows:
1922                f.x = workscan._getabcissa(r)
1923                f.y = workscan._getspectrum(r)
1924                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1925                f.data = None
1926                f.fit()
1927                if plot:
1928                    f.plot(residual=True)
1929                    x = raw_input("Accept fit ( [y]/n ): ")
1930                    if x.upper() == 'N':
1931                        self.blpars.append(None)
1932                        self.masklists.append(None)
1933                        self.actualmask.append(None)
1934                        continue
1935                workscan._setspectrum(f.fitter.getresidual(), r)
1936                self.blpars.append(f.get_parameters())
1937                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1938                self.actualmask.append(f.mask)
1939
1940            if plot:
1941                f._p.unmap()
1942                f._p = None
1943            workscan._add_history("poly_baseline", varlist)
1944            if insitu:
1945                self._assign(workscan)
1946            else:
1947                return workscan
1948        except RuntimeError:
1949            msg = "The fit failed, possibly because it didn't converge."
1950            raise RuntimeError(msg)
1951
1952    @asaplog_post_dec
1953    def poly_baseline(self, mask=None, order=0, plot=False, batch=False,
1954                      insitu=None, rows=None):
1955        """\
1956        Return a scan which has been baselined (all rows) by a polynomial.
1957        Parameters:
1958            mask:       an optional mask
1959            order:      the order of the polynomial (default is 0)
1960            plot:       plot the fit and the residual. Each
1961                        individual fit has to be approved by typing 'y'
1962                        or 'n'. Ignored if batch = True.
1963            batch:      if True a faster algorithm is used and logs
1964                        including the fit results are not output
1965                        (default is False)
1966            insitu:     if False a new scantable is returned.
1967                        Otherwise, the scaling is done in-situ
1968                        The default is taken from .asaprc (False)
1969            rows:       row numbers of spectra to be processed.
1970                        (default is None: for all rows)
1971        Example::
1972            # return a scan baselined by a third order polynomial,
1973            # not using a mask
1974            bscan = scan.poly_baseline(order=3)
1975        """
1976        if insitu is None: insitu = rcParams["insitu"]
1977        varlist = vars()
1978        if insitu:
1979            workscan = self
1980        else:
1981            workscan = self.copy()
1982
1983        nchan = workscan.nchan()
1984       
1985        if mask is None:
1986            mask = [True for i in xrange(nchan)]
1987
1988        try:
1989            if rows is None:
1990                rows = xrange(workscan.nrow())
1991            elif isinstance(rows, int):
1992                rows = [ rows ]
1993           
1994            if len(rows) > 0:
1995                workscan.blpars = []
1996                workscan.masklists = []
1997                workscan.actualmask = []
1998
1999            if batch:
2000                workscan._poly_baseline_batch(mask, order)
2001            elif plot:
2002                f = fitter()
2003                f.set_function(lpoly=order)
2004                for r in rows:
2005                    f.x = workscan._getabcissa(r)
2006                    f.y = workscan._getspectrum(r)
2007                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2008                    f.data = None
2009                    f.fit()
2010                   
2011                    f.plot(residual=True)
2012                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2013                    if accept_fit.upper() == "N":
2014                        workscan.blpars.append(None)
2015                        workscan.masklists.append(None)
2016                        workscan.actualmask.append(None)
2017                        continue
2018                    workscan._setspectrum(f.fitter.getresidual(), r)
2019                    workscan.blpars.append(f.get_parameters())
2020                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2021                    workscan.actualmask.append(f.mask)
2022                   
2023                f._p.unmap()
2024                f._p = None
2025            else:
2026                for r in rows:
2027                    fitparams = workscan._poly_baseline(mask, order, r)
2028                    params = fitparams.getparameters()
2029                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2030                    errors = fitparams.geterrors()
2031                    fmask = mask_and(mask, workscan._getmask(r))
2032
2033                    workscan.blpars.append({"params":params,
2034                                            "fixed": fitparams.getfixedparameters(),
2035                                            "formatted":fmtd, "errors":errors})
2036                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2037                    workscan.actualmask.append(fmask)
2038                   
2039                    asaplog.push(fmtd)
2040           
2041            workscan._add_history("poly_baseline", varlist)
2042           
2043            if insitu:
2044                self._assign(workscan)
2045            else:
2046                return workscan
2047           
2048        except RuntimeError, e:
2049            msg = "The fit failed, possibly because it didn't converge."
2050            if rcParams["verbose"]:
2051                asaplog.push(str(e))
2052                asaplog.push(str(msg))
2053                return
2054            else:
2055                raise RuntimeError(str(e)+'\n'+msg)
2056
2057
2058    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2059                           threshold=3, chan_avg_limit=1, plot=False,
2060                           insitu=None, rows=None):
2061        """\
2062        Return a scan which has been baselined (all rows) by a polynomial.
2063        Spectral lines are detected first using linefinder and masked out
2064        to avoid them affecting the baseline solution.
2065
2066        Parameters:
2067
2068            mask:       an optional mask retrieved from the scantable
2069
2070            edge:       an optional number of channels to drop at the edge of
2071                        the spectrum. If only one value is
2072                        specified, the same number will be dropped from
2073                        both sides of the spectrum. Default is to keep
2074                        all channels. Nested tuples represent individual
2075                        edge selections for different IFs (the number of spectral
2076                        channels can differ between IFs)
2077
2078            order:      the order of the polynomial (default is 0)
2079
2080            threshold:  the threshold used by line finder. It is better to
2081                        keep it large as only strong lines affect the
2082                        baseline solution.
2083
2084            chan_avg_limit:
2085                        a maximum number of consecutive spectral channels to
2086                        average during the search of weak and broad lines.
2087                        The default is no averaging (and no search for weak
2088                        lines). If such lines can affect the fitted baseline
2089                        (e.g. a high order polynomial is fitted), increase this
2090                        parameter (usually values up to 8 are reasonable). Most
2091                        users of this method should find the default value
2092                        sufficient.
2093
2094            plot:       plot the fit and the residual. Each
2095                        individual fit has to be approved by typing 'y'
2096                        or 'n'
2097
2098            insitu:     if False a new scantable is returned.
2099                        Otherwise, the scaling is done in-situ
2100                        The default is taken from .asaprc (False)
2101            rows:       row numbers of spectra to be processed.
2102                        (default is None: for all rows)
2103
2104
2105        Example::
2106
2107            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2108
2109        """
2110        if insitu is None: insitu = rcParams['insitu']
2111        varlist = vars()
2112        from asap.asaplinefind import linefinder
2113        from asap import _is_sequence_or_number as _is_valid
2114
2115        # check whether edge is set up for each IF individually
2116        individualedge = False;
2117        if len(edge) > 1:
2118            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2119                individualedge = True;
2120
2121        if not _is_valid(edge, int) and not individualedge:
2122            raise ValueError, "Parameter 'edge' has to be an integer or a " \
2123                              "pair of integers specified as a tuple. Nested tuples " \
2124                              "are allowed to make individual selection for different IFs."
2125
2126        curedge = (0, 0)
2127        if individualedge:
2128            for edgepar in edge:
2129                if not _is_valid(edgepar, int):
2130                    raise ValueError, "Each element of the 'edge' tuple has " \
2131                                      "to be a pair of integers or an integer."
2132        else:
2133            curedge = edge;
2134
2135        if not insitu:
2136            workscan = self.copy()
2137        else:
2138            workscan = self
2139
2140        # setup fitter
2141        f = fitter()
2142        f.set_function(lpoly=order)
2143
2144        # setup line finder
2145        fl = linefinder()
2146        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2147
2148        fl.set_scan(workscan)
2149
2150        if mask is None:
2151            mask = _n_bools(workscan.nchan(), True)
2152       
2153        if rows is None:
2154            rows = xrange(workscan.nrow())
2155        elif isinstance(rows, int):
2156            rows = [ rows ]
2157       
2158        # Save parameters of baseline fits & masklists as a class attribute.
2159        # NOTICE: It does not reflect changes in scantable!
2160        if len(rows) > 0:
2161            self.blpars=[]
2162            self.masklists=[]
2163            self.actualmask=[]
2164        asaplog.push("Processing:")
2165        for r in rows:
2166            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2167                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2168                 workscan.getpol(r), workscan.getcycle(r))
2169            asaplog.push(msg, False)
2170
2171            # figure out edge parameter
2172            if individualedge:
2173                if len(edge) <= workscan.getif(r):
2174                    raise RuntimeError, "Number of edge elements appears to " \
2175                                        "be less than the number of IFs"
2176                curedge = edge[workscan.getif(r)]
2177
2178            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2179
2180            # setup line finder
2181            fl.find_lines(r, actualmask, curedge)
2182           
2183            f.x = workscan._getabcissa(r)
2184            f.y = workscan._getspectrum(r)
2185            f.mask = fl.get_mask()
2186            f.data = None
2187            f.fit()
2188
2189            # Show mask list
2190            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2191            msg = "mask range: "+str(masklist)
2192            asaplog.push(msg, False)
2193
2194            if plot:
2195                f.plot(residual=True)
2196                x = raw_input("Accept fit ( [y]/n ): ")
2197                if x.upper() == 'N':
2198                    self.blpars.append(None)
2199                    self.masklists.append(None)
2200                    self.actualmask.append(None)
2201                    continue
2202
2203            workscan._setspectrum(f.fitter.getresidual(), r)
2204            self.blpars.append(f.get_parameters())
2205            self.masklists.append(masklist)
2206            self.actualmask.append(f.mask)
2207        if plot:
2208            f._p.unmap()
2209            f._p = None
2210        workscan._add_history("auto_poly_baseline", varlist)
2211        if insitu:
2212            self._assign(workscan)
2213        else:
2214            return workscan
2215
2216    @asaplog_post_dec
2217    def rotate_linpolphase(self, angle):
2218        """\
2219        Rotate the phase of the complex polarization O=Q+iU correlation.
2220        This is always done in situ in the raw data.  So if you call this
2221        function more than once then each call rotates the phase further.
2222
2223        Parameters:
2224
2225            angle:   The angle (degrees) to rotate (add) by.
2226
2227        Example::
2228
2229            scan.rotate_linpolphase(2.3)
2230
2231        """
2232        varlist = vars()
2233        self._math._rotate_linpolphase(self, angle)
2234        self._add_history("rotate_linpolphase", varlist)
2235        return
2236
2237    @asaplog_post_dec
2238    def rotate_xyphase(self, angle):
2239        """\
2240        Rotate the phase of the XY correlation.  This is always done in situ
2241        in the data.  So if you call this function more than once
2242        then each call rotates the phase further.
2243
2244        Parameters:
2245
2246            angle:   The angle (degrees) to rotate (add) by.
2247
2248        Example::
2249
2250            scan.rotate_xyphase(2.3)
2251
2252        """
2253        varlist = vars()
2254        self._math._rotate_xyphase(self, angle)
2255        self._add_history("rotate_xyphase", varlist)
2256        return
2257
2258    @asaplog_post_dec
2259    def swap_linears(self):
2260        """\
2261        Swap the linear polarisations XX and YY, or more precisely the first
2262        two polarisations, as this also works for circulars.
2263        """
2264        varlist = vars()
2265        self._math._swap_linears(self)
2266        self._add_history("swap_linears", varlist)
2267        return
2268
2269    @asaplog_post_dec
2270    def invert_phase(self):
2271        """\
2272        Invert the phase of the complex polarisation
2273        """
2274        varlist = vars()
2275        self._math._invert_phase(self)
2276        self._add_history("invert_phase", varlist)
2277        return
2278
2279    @asaplog_post_dec
2280    def add(self, offset, insitu=None):
2281        """\
2282        Return a scan where all spectra have the offset added
2283
2284        Parameters:
2285
2286            offset:      the offset
2287
2288            insitu:      if False a new scantable is returned.
2289                         Otherwise, the scaling is done in-situ
2290                         The default is taken from .asaprc (False)
2291
2292        """
2293        if insitu is None: insitu = rcParams['insitu']
2294        self._math._setinsitu(insitu)
2295        varlist = vars()
2296        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2297        s._add_history("add", varlist)
2298        if insitu:
2299            self._assign(s)
2300        else:
2301            return s
2302
2303    @asaplog_post_dec
2304    def scale(self, factor, tsys=True, insitu=None):
2305        """\
2306
2307        Return a scan where all spectra are scaled by the given 'factor'
2308
2309        Parameters:
2310
2311            factor:      the scaling factor (float or 1D float list)
2312
2313            insitu:      if False a new scantable is returned.
2314                         Otherwise, the scaling is done in-situ
2315                         The default is taken from .asaprc (False)
2316
2317            tsys:        if True (default) then apply the operation to Tsys
2318                         as well as the data
2319
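        Example (a sketch; the factor is illustrative only)::

            # scale all spectra (and Tsys) by 1.05, returning a new scantable
            scaled = scan.scale(1.05, tsys=True, insitu=False)
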
2320        """
2321        if insitu is None: insitu = rcParams['insitu']
2322        self._math._setinsitu(insitu)
2323        varlist = vars()
2324        s = None
2325        import numpy
2326        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2327            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2328                from asapmath import _array2dOp
2329                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2330            else:
2331                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2332        else:
2333            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2334        s._add_history("scale", varlist)
2335        if insitu:
2336            self._assign(s)
2337        else:
2338            return s
2339
2340    def set_sourcetype(self, match, matchtype="pattern",
2341                       sourcetype="reference"):
2342        """\
2343        Set the type of the source to be a source or reference scan
2344        using the provided pattern.
2345
2346        Parameters:
2347
2348            match:          a Unix style pattern, regular expression or selector
2349
2350            matchtype:      'pattern' (default) UNIX style pattern or
2351                            'regex' regular expression
2352
2353            sourcetype:     the type of the source to use (source/reference)
2354
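        Example (a sketch; the '*_R' pattern follows the Mopra/Parkes naming
        convention for reference scans)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
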
2355        """
2356        varlist = vars()
2357        basesel = self.get_selection()
2358        stype = -1
2359        if sourcetype.lower().startswith("r"):
2360            stype = 1
2361        elif sourcetype.lower().startswith("s"):
2362            stype = 0
2363        else:
2364            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2365        if matchtype.lower().startswith("p"):
2366            matchtype = "pattern"
2367        elif matchtype.lower().startswith("r"):
2368            matchtype = "regex"
2369        else:
2370            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2371        sel = selector()
2372        if isinstance(match, selector):
2373            sel = match
2374        else:
2375            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2376        self.set_selection(basesel+sel)
2377        self._setsourcetype(stype)
2378        self.set_selection(basesel)
2379        self._add_history("set_sourcetype", varlist)
2380
2381    @asaplog_post_dec
2382    @preserve_selection
2383    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2384        """\
2385        This function allows you to build quotients automatically.
2386        It assumes the observation has the same number of
2387        "on" and "off" scans.
2388
2389        Parameters:
2390
2391            preserve:       you can preserve (default) the continuum or
2392                            remove it.  The equations used are
2393
2394                            preserve: Output = Toff * (on/off) - Toff
2395
2396                            remove:   Output = Toff * (on/off) - Ton
2397
2398            mode:           the on/off detection mode
2399                            'paired' (default)
2400                            identifies 'off' scans by the
2401                            trailing '_R' (Mopra/Parkes) or
2402                            '_e'/'_w' (Tid) and matches
2403                            on/off pairs from the observing pattern
2404                            'time'
2405                            finds the closest off in time
2406
2407        .. todo:: verify argument is not implemented
2408
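        Example (a minimal sketch)::

            # build quotients from on/off pairs, removing the continuum
            q = scan.auto_quotient(preserve=False, mode='paired')
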
2409        """
2410        varlist = vars()
2411        modes = ["time", "paired"]
2412        if not mode in modes:
2413            msg = "please provide valid mode. Valid modes are %s" % (modes)
2414            raise ValueError(msg)
2415        s = None
2416        if mode.lower() == "paired":
2417            sel = self.get_selection()
2418            sel.set_query("SRCTYPE==psoff")
2419            self.set_selection(sel)
2420            offs = self.copy()
2421            sel.set_query("SRCTYPE==pson")
2422            self.set_selection(sel)
2423            ons = self.copy()
2424            s = scantable(self._math._quotient(ons, offs, preserve))
2425        elif mode.lower() == "time":
2426            s = scantable(self._math._auto_quotient(self, mode, preserve))
2427        s._add_history("auto_quotient", varlist)
2428        return s
2429
2430    @asaplog_post_dec
2431    def mx_quotient(self, mask = None, weight='median', preserve=True):
2432        """\
2433        Form a quotient using "off" beams when observing in "MX" mode.
2434
2435        Parameters:
2436
2437            mask:           an optional mask to be used when weight == 'stddev'
2438
2439            weight:         How to average the off beams.  Default is 'median'.
2440
2441            preserve:       you can preserve (default) the continuum or
2442                            remove it.  The equations used are:
2443
2444                                preserve: Output = Toff * (on/off) - Toff
2445
2446                                remove:   Output = Toff * (on/off) - Ton
2447
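        Example (a minimal sketch)::

            # form the MX quotient, averaging the off beams with 'median'
            q = scan.mx_quotient(weight='median', preserve=True)
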
2448        """
2449        mask = mask or ()
2450        varlist = vars()
2451        on = scantable(self._math._mx_extract(self, 'on'))
2452        preoff = scantable(self._math._mx_extract(self, 'off'))
2453        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2454        from asapmath  import quotient
2455        q = quotient(on, off, preserve)
2456        q._add_history("mx_quotient", varlist)
2457        return q
2458
2459    @asaplog_post_dec
2460    def freq_switch(self, insitu=None):
2461        """\
2462        Apply frequency switching to the data.
2463
2464        Parameters:
2465
2466            insitu:      if False a new scantable is returned.
2467                         Otherwise, the switching is done in-situ
2468                         The default is taken from .asaprc (False)
2469
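        Example (a minimal sketch)::

            # apply frequency switching, returning a new scantable
            fs = scan.freq_switch(insitu=False)
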
2470        """
2471        if insitu is None: insitu = rcParams['insitu']
2472        self._math._setinsitu(insitu)
2473        varlist = vars()
2474        s = scantable(self._math._freqswitch(self))
2475        s._add_history("freq_switch", varlist)
2476        if insitu:
2477            self._assign(s)
2478        else:
2479            return s
2480
2481    @asaplog_post_dec
2482    def recalc_azel(self):
2483        """Recalculate the azimuth and elevation for each position."""
2484        varlist = vars()
2485        self._recalcazel()
2486        self._add_history("recalc_azel", varlist)
2487        return
2488
2489    @asaplog_post_dec
2490    def __add__(self, other):
2491        varlist = vars()
2492        s = None
2493        if isinstance(other, scantable):
2494            s = scantable(self._math._binaryop(self, other, "ADD"))
2495        elif isinstance(other, float):
2496            s = scantable(self._math._unaryop(self, other, "ADD", False))
2497        else:
2498            raise TypeError("Other input is not a scantable or float value")
2499        s._add_history("operator +", varlist)
2500        return s
2501
2502    @asaplog_post_dec
2503    def __sub__(self, other):
2504        """
2505        implicit on all axes and on Tsys
2506        """
2507        varlist = vars()
2508        s = None
2509        if isinstance(other, scantable):
2510            s = scantable(self._math._binaryop(self, other, "SUB"))
2511        elif isinstance(other, float):
2512            s = scantable(self._math._unaryop(self, other, "SUB", False))
2513        else:
2514            raise TypeError("Other input is not a scantable or float value")
2515        s._add_history("operator -", varlist)
2516        return s
2517
2518    @asaplog_post_dec
2519    def __mul__(self, other):
2520        """
2521        implicit on all axes and on Tsys
2522        """
2523        varlist = vars()
2524        s = None
2525        if isinstance(other, scantable):
2526            s = scantable(self._math._binaryop(self, other, "MUL"))
2527        elif isinstance(other, float):
2528            s = scantable(self._math._unaryop(self, other, "MUL", False))
2529        else:
2530            raise TypeError("Other input is not a scantable or float value")
2531        s._add_history("operator *", varlist)
2532        return s
2533
2534
2535    @asaplog_post_dec
2536    def __div__(self, other):
2537        """
2538        implicit on all axes and on Tsys
2539        """
2540        varlist = vars()
2541        s = None
2542        if isinstance(other, scantable):
2543            s = scantable(self._math._binaryop(self, other, "DIV"))
2544        elif isinstance(other, float):
2545            if other == 0.0:
2546                raise ZeroDivisionError("Dividing by zero is not recommended")
2547            s = scantable(self._math._unaryop(self, other, "DIV", False))
2548        else:
2549            raise TypeError("Other input is not a scantable or float value")
2550        s._add_history("operator /", varlist)
2551        return s
2552
2553    @asaplog_post_dec
2554    def get_fit(self, row=0):
2555        """\
2556        Print or return the stored fits for a row in the scantable
2557
2558        Parameters:
2559
2560            row:    the row which the fit has been applied to.
2561
2562        """
2563        if row >= self.nrow():
2564            return
2565        from asap.asapfit import asapfit
2566        fit = asapfit(self._getfit(row))
2567        asaplog.push( '%s' %(fit) )
2568        return fit.as_dict()
2569
2570    def flag_nans(self):
2571        """\
2572        Utility function to flag NaN values in the scantable.
2573        """
2574        import numpy
2575        basesel = self.get_selection()
2576        for i in range(self.nrow()):
2577            sel = self.get_row_selector(i)
2578            self.set_selection(basesel+sel)
2579            nans = numpy.isnan(self._getspectrum(0))
2580            if numpy.any(nans):
2581                bnans = [ bool(v) for v in nans]
2582                self.flag(bnans)
2583        self.set_selection(basesel)
2584
2585    def get_row_selector(self, rowno):
2586        return selector(beams=self.getbeam(rowno),
2587                        ifs=self.getif(rowno),
2588                        pols=self.getpol(rowno),
2589                        scans=self.getscan(rowno),
2590                        cycles=self.getcycle(rowno))
2591
2592    def _add_history(self, funcname, parameters):
2593        if not rcParams['scantable.history']:
2594            return
2595        # create date
2596        sep = "##"
2597        from datetime import datetime
2598        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2599        hist = dstr+sep
2600        hist += funcname+sep#cdate+sep
2601        if parameters.has_key('self'): del parameters['self']
2602        for k, v in parameters.iteritems():
2603            if type(v) is dict:
2604                for k2, v2 in v.iteritems():
2605                    hist += k2
2606                    hist += "="
2607                    if isinstance(v2, scantable):
2608                        hist += 'scantable'
2609                    elif k2 == 'mask':
2610                        if isinstance(v2, list) or isinstance(v2, tuple):
2611                            hist += str(self._zip_mask(v2))
2612                        else:
2613                            hist += str(v2)
2614                    else:
2615                        hist += str(v2)
2616            else:
2617                hist += k
2618                hist += "="
2619                if isinstance(v, scantable):
2620                    hist += 'scantable'
2621                elif k == 'mask':
2622                    if isinstance(v, list) or isinstance(v, tuple):
2623                        hist += str(self._zip_mask(v))
2624                    else:
2625                        hist += str(v)
2626                else:
2627                    hist += str(v)
2628            hist += sep
2629        hist = hist[:-2] # remove trailing '##'
2630        self._addhistory(hist)
2631
2632
2633    def _zip_mask(self, mask):
2634        mask = list(mask)
2635        i = 0
2636        segments = []
2637        while mask[i:].count(1):
2638            i += mask[i:].index(1)
2639            if mask[i:].count(0):
2640                j = i + mask[i:].index(0)
2641            else:
2642                j = len(mask)
2643            segments.append([i, j])
2644            i = j
2645        return segments
2646
2647    def _get_ordinate_label(self):
2648        fu = "("+self.get_fluxunit()+")"
2649        import re
2650        lbl = "Intensity"
2651        if re.match(".K.", fu):
2652            lbl = "Brightness Temperature "+ fu
2653        elif re.match(".Jy.", fu):
2654            lbl = "Flux density "+ fu
2655        return lbl
2656
2657    def _check_ifs(self):
2658        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2659        nchans = filter(lambda t: t > 0, nchans)
2660        return (sum(nchans)/len(nchans) == nchans[0])
2661
2662    @asaplog_post_dec
2663    #def _fill(self, names, unit, average, getpt, antenna):
2664    def _fill(self, names, unit, average, opts={}):
2665        first = True
2666        fullnames = []
2667        for name in names:
2668            name = os.path.expandvars(name)
2669            name = os.path.expanduser(name)
2670            if not os.path.exists(name):
2671                msg = "File '%s' does not exist" % (name)
2672                raise IOError(msg)
2673            fullnames.append(name)
2674        if average:
2675            asaplog.push('Auto averaging integrations')
2676        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2677        for name in fullnames:
2678            tbl = Scantable(stype)
2679            r = filler(tbl)
2680            rx = rcParams['scantable.reference']
2681            r.setreferenceexpr(rx)
2682            msg = "Importing %s..." % (name)
2683            asaplog.push(msg, False)
2684            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2685            r.open(name, opts)# antenna, -1, -1, getpt)
2686            r.fill()
2687            if average:
2688                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2689            if not first:
2690                tbl = self._math._merge([self, tbl])
2691            Scantable.__init__(self, tbl)
2692            r.close()
2693            del r, tbl
2694            first = False
2695            #flush log
2696        asaplog.post()
2697        if unit is not None:
2698            self.set_fluxunit(unit)
2699        if not is_casapy():
2700            self.set_freqframe(rcParams['scantable.freqframe'])
2701
2702    def __getitem__(self, key):
2703        if key < 0:
2704            key += self.nrow()
2705        if key >= self.nrow():
2706            raise IndexError("Row index out of range.")
2707        return self._getspectrum(key)
2708
2709    def __setitem__(self, key, value):
2710        if key < 0:
2711            key += self.nrow()
2712        if key >= self.nrow():
2713            raise IndexError("Row index out of range.")
2714        if not hasattr(value, "__len__") or \
2715                len(value) > self.nchan(self.getif(key)):
2716            raise ValueError("Spectrum length doesn't match.")
2717        return self._setspectrum(value, key)
2718
2719    def __len__(self):
2720        return self.nrow()
2721
2722    def __iter__(self):
2723        for i in range(len(self)):
2724            yield self[i]