source: trunk/python/scantable.py @ 1857

Last change on this file since 1857 was 1857, checked in by Malte Marquarding, 14 years ago

auto_quotient is the first method to use @preserve_selection

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 90.9 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
13from asap.logging import asaplog, print_log, print_log_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1824]17from asap.utils import _n_bools, mask_not, mask_and, mask_or
[102]18
[1689]19
20def preserve_selection(func):
[1691]21    @wraps_dec(func)
[1689]22    def wrap(obj, *args, **kw):
23        basesel = obj.get_selection()
[1857]24        try:
25            val = func(obj, *args, **kw)
26        finally:
27            obj.set_selection(basesel)
[1689]28        return val
29    return wrap
30
[1846]31def is_scantable(filename):
32    """Is the given file a scantable?
[1689]33
[1846]34    Parameters:
35
36        filename: the name of the file/directory to test
37
38    """
[1697]39    return (os.path.isdir(filename)
40            and not os.path.exists(filename+'/table.f1')
41            and os.path.exists(filename+'/table.info'))
42
43
[876]44class scantable(Scantable):
[1846]45    """\
46        The ASAP container for scans (single-dish data).
[102]47    """
[1819]48
[1589]49    @print_log_dec
[1824]50    def __init__(self, filename, average=None, unit=None, getpt=None,
51                 antenna=None, parallactify=None):
[1846]52        """\
[102]53        Create a scantable from a saved one or make a reference
[1846]54
[102]55        Parameters:
[1846]56
57            filename:     the name of an asap table on disk
58                          or
59                          the name of a rpfits/sdfits/ms file
60                          (integrations within scans are auto averaged
61                          and the whole file is read) or
62                          [advanced] a reference to an existing scantable
63
64            average:      average all integrations within a scan on read.
65                          The default (True) is taken from .asaprc.
66
[484]67            unit:         brightness unit; must be consistent with K or Jy.
[1846]68                          Over-rides the default selected by the filler
69                          (input rpfits/sdfits/ms) or replaces the value
70                          in existing scantables
71
72            getpt:        for MeasurementSet input data only:
73                          If True, all pointing data are filled.
74                          The default is False, which can make loading
75                          the MS data faster in some cases.
76
77            antenna:      Antenna selection. integer (id) or string (name or id).
78
79            parallactify: Indicate that the data had been parallactified. Default
80                          is taken from rc file.
81
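        Example (a minimal sketch; the file name 'myobs.rpf' is illustrative)::

            s = scantable('myobs.rpf', average=True)
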
[710]82        """
[976]83        if average is None:
[710]84            average = rcParams['scantable.autoaverage']
[1819]85        if getpt is None:
86            getpt = True
[1843]87        if antenna is not None:
88            asaplog.push("Antenna selection currently unsupported."
89                         " Using '0'")
90            print_log('WARN')
[1819]91        if antenna is None:
92            antenna = ''
93        elif type(antenna) == int:
[1843]94            antenna = '%s' % antenna
[1819]95        elif type(antenna) == list:
96            tmpstr = ''
97            for i in range( len(antenna) ):
98                if type(antenna[i]) == int:
99                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
100                elif type(antenna[i]) == str:
101                    tmpstr=tmpstr+antenna[i]+','
102                else:
103                    asaplog.push('Bad antenna selection.')
104                    print_log('ERROR')
105                    return
106            antenna = tmpstr.rstrip(',')
[1593]107        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]108        varlist = vars()
[876]109        from asap._asap import stmath
[1819]110        self._math = stmath( rcParams['insitu'] )
[876]111        if isinstance(filename, Scantable):
112            Scantable.__init__(self, filename)
[181]113        else:
[1697]114            if isinstance(filename, str):
[976]115                filename = os.path.expandvars(filename)
116                filename = os.path.expanduser(filename)
117                if not os.path.exists(filename):
118                    s = "File '%s' not found." % (filename)
[718]119                    if rcParams['verbose']:
[976]120                        asaplog.push(s)
[1819]121                        print_log('ERROR')
[718]122                        return
[976]123                    raise IOError(s)
[1697]124                if is_scantable(filename):
125                    ondisk = rcParams['scantable.storage'] == 'disk'
126                    Scantable.__init__(self, filename, ondisk)
127                    if unit is not None:
128                        self.set_fluxunit(unit)
[1819]129                    # do not reset to the default freqframe
130                    #self.set_freqframe(rcParams['scantable.freqframe'])
131                elif os.path.isdir(filename) \
132                         and not os.path.exists(filename+'/table.f1'):
133                    msg = "The given file '%s' is not a valid " \
134                          "asap table." % (filename)
135                    if rcParams['verbose']:
136                        #print msg
137                        asaplog.push( msg )
138                        print_log( 'ERROR' )
139                        return
140                    else:
141                        raise IOError(msg)
[226]142                else:
[1819]143                    self._fill([filename], unit, average, getpt, antenna)
[1118]144            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]145                  and isinstance(filename[-1], str):
[1819]146                self._fill(filename, unit, average, getpt, antenna)
[1586]147        self.parallactify(parallactify)
[1259]148        self._add_history("scantable", varlist)
[102]149
[1589]150    @print_log_dec
[876]151    def save(self, name=None, format=None, overwrite=False):
[1846]152        """\
[1280]153        Store the scantable on disk. This can be an asap (aips++) Table,
154        SDFITS or MS2 format.
[1846]155
[116]156        Parameters:
[1846]157
[1093]158            name:        the name of the outputfile. For format "ASCII"
159                         this is the root file name (data in 'name'.txt
[497]160                         and header in 'name'_header.txt)
[1855]161
[116]162            format:      an optional file format. Default is ASAP.
[1855]163                         Allowed are:
164
165                            * 'ASAP' (save as ASAP [aips++] Table),
166                            * 'SDFITS' (save as SDFITS file)
167                            * 'ASCII' (saves as ascii text file)
168                            * 'MS2' (saves as a casacore MeasurementSet V2)
169                            * 'FITS' (save as image FITS - not readable by class)
170                            * 'CLASS' (save as FITS readable by CLASS)
171
[411]172            overwrite:   If True, overwrite the file if it exists.
[256]173                         The default (False) is to return with a warning
[411]174                         without writing the output. USE WITH CARE.
[1855]175
[1846]176        Example::
177
[116]178            scan.save('myscan.asap')
[1118]179            scan.save('myscan.sdfits', 'SDFITS')
[1846]180
[116]181        """
[411]182        from os import path
[1593]183        format = format or rcParams['scantable.save']
[256]184        suffix = '.'+format.lower()
[1118]185        if name is None or name == "":
[256]186            name = 'scantable'+suffix
[718]187            msg = "No filename given. Using default name %s..." % name
188            asaplog.push(msg)
[411]189        name = path.expandvars(name)
[256]190        if path.isfile(name) or path.isdir(name):
191            if not overwrite:
[718]192                msg = "File %s exists." % name
193                if rcParams['verbose']:
[1819]194                    #print msg
195                    asaplog.push( msg )
196                    print_log( 'ERROR' )
[718]197                    return
198                else:
199                    raise IOError(msg)
[451]200        format2 = format.upper()
201        if format2 == 'ASAP':
[116]202            self._save(name)
203        else:
[989]204            from asap._asap import stwriter as stw
[1118]205            writer = stw(format2)
206            writer.write(self, name)
[116]207        return
208
[102]209    def copy(self):
[1846]210        """Return a copy of this scantable.
211
212        *Note*:
213
[1348]214            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]215
216        Example::
217
[102]218            copiedscan = scan.copy()
[1846]219
[102]220        """
[876]221        sd = scantable(Scantable._copy(self))
[113]222        return sd
223
[1093]224    def drop_scan(self, scanid=None):
[1846]225        """\
[1093]226        Return a new scantable where the specified scan number(s) has(have)
227        been dropped.
[1846]228
[1093]229        Parameters:
[1846]230
[1093]231            scanid:    a (list of) scan number(s)
[1846]232
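        Example (illustrative; assumes an existing scantable ``scan``)::

            # drop scans 2 and 4 and return the rest as a new scantable
            cleaned = scan.drop_scan([2, 4])
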
[1093]233        """
234        from asap import _is_sequence_or_number as _is_valid
235        from asap import _to_list
236        from asap import unique
237        if not _is_valid(scanid):
238            if rcParams['verbose']:
[1819]239                asaplog.push( 'Please specify a scanno to drop from the scantable' )
240                print_log( 'ERROR' )
[1093]241                return
242            else:
243                raise RuntimeError("No scan given")
244        try:
245            scanid = _to_list(scanid)
246            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
247            for sid in scanid: allscans.remove(sid)
[1118]248            if len(allscans) == 0:
249                raise ValueError("Can't remove all scans")
[1093]250        except ValueError:
251            if rcParams['verbose']:
[1819]252                print_log()
253                asaplog.push( "Couldn't find any match." )
254                print_log( 'ERROR' )
[1093]255                return
256            else: raise
257        try:
[1593]258            sel = selector(scans=allscans)
[1594]259            return self._select_copy(sel)
[1093]260        except RuntimeError:
[1118]261            if rcParams['verbose']:
[1819]262                print_log()
263                asaplog.push( "Couldn't find any match." )
264                print_log( 'ERROR' )
[1118]265            else:
266                raise
[1093]267
[1594]268    def _select_copy(self, selection):
269        orig = self.get_selection()
270        self.set_selection(orig+selection)
271        cp = self.copy()
272        self.set_selection(orig)
273        return cp
274
[102]275    def get_scan(self, scanid=None):
[1855]276        """\
[102]277        Return a specific scan (by scanno) or collection of scans (by
278        source name) in a new scantable.
[1846]279
280        *Note*:
281
[1348]282            See scantable.drop_scan() for the inverse operation.
[1846]283
[102]284        Parameters:
[1846]285
[513]286            scanid:    a (list of) scanno or a source name, unix-style
287                       patterns are accepted for source name matching, e.g.
288                       '*_R' gets all 'ref' scans
[1846]289
290        Example::
291
[513]292            # get all scans containing the source '323p459'
293            newscan = scan.get_scan('323p459')
294            # get all 'off' scans
295            refscans = scan.get_scan('*_R')
296            # get a subset of scans by scanno (as listed in scan.summary())
[1118]297            newscan = scan.get_scan([0, 2, 7, 10])
[1846]298
[102]299        """
300        if scanid is None:
[718]301            if rcParams['verbose']:
[1819]302                #print "Please specify a scan no or name to " \
303                #      "retrieve from the scantable"
[1846]304                asaplog.push( 'Please specify a scan no or name to retrieve'
305                             ' from the scantable' )
[1819]306                print_log( 'ERROR' )
[718]307                return
308            else:
309                raise RuntimeError("No scan given")
310
[102]311        try:
[946]312            bsel = self.get_selection()
313            sel = selector()
[102]314            if type(scanid) is str:
[946]315                sel.set_name(scanid)
[1594]316                return self._select_copy(sel)
[102]317            elif type(scanid) is int:
[946]318                sel.set_scans([scanid])
[1594]319                return self._select_copy(sel)
[381]320            elif type(scanid) is list:
[946]321                sel.set_scans(scanid)
[1594]322                return self._select_copy(sel)
[381]323            else:
[718]324                msg = "Illegal scanid type, use 'int' or 'list' if ints."
325                if rcParams['verbose']:
[1819]326                    #print msg
327                    asaplog.push( msg )
328                    print_log( 'ERROR' )
[718]329                else:
330                    raise TypeError(msg)
[102]331        except RuntimeError:
[1819]332            if rcParams['verbose']:
333                #print "Couldn't find any match."
334                print_log()
335                asaplog.push( "Couldn't find any match." )
336                print_log( 'ERROR' )
[718]337            else: raise
[102]338
339    def __str__(self):
[1118]340        return Scantable._summary(self, True)
[102]341
[976]342    def summary(self, filename=None):
[1846]343        """\
[102]344        Print a summary of the contents of this scantable.
[1846]345
[102]346        Parameters:
[1846]347
[102]348            filename:    the name of a file to write the output to
349                         Default - no file output
[1846]350
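        Example (illustrative)::

            scan.summary()                   # page the summary to the screen
            scan.summary('mysummary.txt')    # or write it to a file
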
[102]351        """
[976]352        info = Scantable._summary(self, True)
[102]353        if filename is not None:
[256]354            if filename == "":
355                filename = 'scantable_summary.txt'
[415]356            from os.path import expandvars, isdir
[411]357            filename = expandvars(filename)
[415]358            if not isdir(filename):
[413]359                data = open(filename, 'w')
360                data.write(info)
361                data.close()
362            else:
[718]363                msg = "Illegal file name '%s'." % (filename)
364                if rcParams['verbose']:
[1819]365                    #print msg
366                    asaplog.push( msg )
367                    print_log( 'ERROR' )
[718]368                else:
369                    raise IOError(msg)
370        if rcParams['verbose']:
[794]371            try:
372                from IPython.genutils import page as pager
373            except ImportError:
374                from pydoc import pager
375            pager(info)
[718]376        else:
377            return info
[710]378
[1512]379    def get_spectrum(self, rowno):
[1471]380        """Return the spectrum for the given row in the scantable as a list.
[1846]381
[1471]382        Parameters:
[1846]383
[1573]384             rowno:   the row number to retrieve the spectrum from
[1846]385
[1471]386        """
387        return self._getspectrum(rowno)
[946]388
[1471]389    def get_mask(self, rowno):
390        """Return the mask for the given row in the scantable as a list.
[1846]391
[1471]392        Parameters:
[1846]393
[1573]394             rowno:   the row number to retrieve the mask from
[1846]395
[1471]396        """
397        return self._getmask(rowno)
398
399    def set_spectrum(self, spec, rowno):
400        """Set the spectrum for the given row in the scantable.
[1846]401
[1471]402        Parameters:
[1846]403
[1855]404             spec:   the new spectrum
[1846]405
[1855]406             rowno:  the row number to set the spectrum for
407
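        Example (illustrative; assumes an existing scantable ``scan``)::

            # scale the spectrum in row 0 by a factor of two and store it back
            spec = [2.0 * v for v in scan.get_spectrum(0)]
            scan.set_spectrum(spec, 0)
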
[1471]408        """
409        assert(len(spec) == self.nchan())
410        return self._setspectrum(spec, rowno)
411
[1600]412    def get_coordinate(self, rowno):
413        """Return the (spectral) coordinate for a given 'rowno'.
[1846]414
415        *Note*:
416
[1600]417            * This coordinate is only valid until a scantable method modifies
418              the frequency axis.
419            * This coordinate contains the original frequency set-up,
420              NOT the new frame. The conversions, however, are done using the
421              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
422              use scantable.freq_align first. Without it there is no closure,
[1846]423              i.e.::
[1600]424
[1846]425                  c = myscan.get_coordinate(0)
426                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
427
[1600]428        Parameters:
[1846]429
[1600]430             rowno:    the row number for the spectral coordinate
431
432        """
433        return coordinate(Scantable.get_coordinate(self, rowno))
434
[946]435    def get_selection(self):
[1846]436        """\
[1005]437        Get the selection object currently set on this scantable.
[1846]438
439        Example::
440
[1005]441            sel = scan.get_selection()
442            sel.set_ifs(0)              # select IF 0
443            scan.set_selection(sel)     # apply modified selection
[1846]444
[946]445        """
446        return selector(self._getselection())
447
[1576]448    def set_selection(self, selection=None, **kw):
[1846]449        """\
[1005]450        Select a subset of the data. All following operations on this scantable
451        are only applied to this selection.
[1846]452
[1005]453        Parameters:
[1697]454
[1846]455            selection:    a selector object (default unset the selection), or
456                          any combination of "pols", "ifs", "beams", "scans",
457                          "cycles", "name", "query"
[1697]458
[1846]459        Examples::
[1697]460
[1005]461            sel = selector()         # create a selection object
[1118]462            sel.set_scans([0, 3])     # select SCANNO 0 and 3
[1005]463            scan.set_selection(sel)  # set the selection
464            scan.summary()           # will only print summary of scanno 0 an 3
465            scan.set_selection()     # unset the selection
[1697]466            # or the equivalent
467            scan.set_selection(scans=[0,3])
468            scan.summary()           # will only print summary of scanno 0 and 3
469            scan.set_selection()     # unset the selection
[1846]470
[946]471        """
[1576]472        if selection is None:
473            # reset
474            if len(kw) == 0:
475                selection = selector()
476            else:
477                # try keywords
478                for k in kw:
479                    if k not in selector.fields:
480                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
481                selection = selector(**kw)
[946]482        self._setselection(selection)
483
[1819]484    def get_row(self, row=0, insitu=None):
[1846]485        """\
[1819]486        Select a row in the scantable.
487        Return a scantable with a single row.
[1846]488
[1819]489        Parameters:
[1846]490
491            row:    row no of integration, default is 0.
492            insitu: if False a new scantable is returned. Otherwise, the
493                    selection is done in-situ. The default is taken from .asaprc
494                    (False)
495
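        Example (illustrative)::

            # return row 5 as a new single-row scantable
            row5 = scan.get_row(5, insitu=False)
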
[1819]496        """
497        if insitu is None: insitu = rcParams['insitu']
498        if not insitu:
499            workscan = self.copy()
500        else:
501            workscan = self
502        # Select a row
503        sel=selector()
504        sel.set_scans([workscan.getscan(row)])
505        sel.set_cycles([workscan.getcycle(row)])
506        sel.set_beams([workscan.getbeam(row)])
507        sel.set_ifs([workscan.getif(row)])
508        sel.set_polarisations([workscan.getpol(row)])
509        sel.set_name(workscan._getsourcename(row))
510        workscan.set_selection(sel)
511        if not workscan.nrow() == 1:
512            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
513            raise RuntimeError(msg)
514        del sel
515        if insitu:
516            self._assign(workscan)
517        else:
518            return workscan
519
520    #def stats(self, stat='stddev', mask=None):
521    def stats(self, stat='stddev', mask=None, form='3.3f'):
[1846]522        """\
[135]523        Determine the specified statistic of the current beam/if/pol
[102]524        Takes a 'mask' as an optional parameter to specify which
525        channels should be excluded.
[1846]526
[102]527        Parameters:
[1846]528
[1819]529            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
530                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]531
[135]532            mask:    an optional mask specifying where the statistic
[102]533                     should be determined.
[1855]534
[1819]535            form:    format string to print statistic values
[1846]536
537        Example::
538
[113]539            scan.set_unit('channel')
[1118]540            msk = scan.create_mask([100, 200], [500, 600])
[135]541            scan.stats(stat='mean', mask=msk)
[1846]542
[102]543        """
[1593]544        mask = mask or []
[876]545        if not self._check_ifs():
[1118]546            raise ValueError("Cannot apply mask as the IFs have different "
547                             "number of channels. Please use set_selection() "
548                             "to select individual IFs")
[1819]549        rtnabc = False
550        if stat.lower().endswith('_abc'): rtnabc = True
551        getchan = False
552        if stat.lower().startswith('min') or stat.lower().startswith('max'):
553            chan = self._math._minmaxchan(self, mask, stat)
554            getchan = True
555            statvals = []
556        if not rtnabc: statvals = self._math._stats(self, mask, stat)
[256]557
[1819]558        #def cb(i):
559        #    return statvals[i]
[256]560
[1819]561        #return self._row_callback(cb, stat)
[102]562
[1819]563        label=stat
564        #callback=cb
565        out = ""
566        #outvec = []
567        sep = '-'*50
568        for i in range(self.nrow()):
569            refstr = ''
570            statunit= ''
571            if getchan:
572                qx, qy = self.chan2data(rowno=i, chan=chan[i])
573                if rtnabc:
574                    statvals.append(qx['value'])
575                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
576                    statunit= '['+qx['unit']+']'
577                else:
578                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
579
580            tm = self._gettime(i)
581            src = self._getsourcename(i)
582            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
583            out += 'Time[%s]:\n' % (tm)
584            if self.nbeam(-1) > 1:
585                out +=  ' Beam[%d] ' % (self.getbeam(i))
586            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
587            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
588            #outvec.append(callback(i))
589            #out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
590            out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
591            out +=  sep+"\n"
592
593        if rcParams['verbose']:
594            import os
595            if os.environ.has_key( 'USER' ):
596                usr=os.environ['USER']
597            else:
598                import commands
599                usr=commands.getoutput( 'whoami' )
600            tmpfile='/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
601            f=open(tmpfile,'w')
602            print >> f, sep
603            print >> f, ' %s %s' % (label, statunit)
604            print >> f, sep
605            print >> f, out
606            f.close()
607            f=open(tmpfile,'r')
608            x=f.readlines()
609            f.close()
610            blanc=''
611            asaplog.push(blanc.join(x), False)
612            #for xx in x:
613            #    asaplog.push( xx, False )
614            print_log()
615        return statvals
616
617    def chan2data(self, rowno=0, chan=0):
[1846]618        """\
[1819]619        Returns channel/frequency/velocity and spectral value
620        at an arbitrary row and channel in the scantable.
[1846]621
[1819]622        Parameters:
[1846]623
[1819]624            rowno:   a row number in the scantable. Default is the
625                     first row, i.e. rowno=0
[1855]626
[1819]627            chan:    a channel in the scantable. Default is the first
628                     channel, i.e. chan=0
[1846]629
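        Example (illustrative)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit'], qy['value'], qy['unit']
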
[1819]630        """
631        if isinstance(rowno, int) and isinstance(chan, int):
632            qx = {'unit': self.get_unit(),
633                  'value': self._getabcissa(rowno)[chan]}
634            qy = {'unit': self.get_fluxunit(),
635                  'value': self._getspectrum(rowno)[chan]}
636            return qx, qy
637
[1118]638    def stddev(self, mask=None):
[1846]639        """\
[135]640        Determine the standard deviation of the current beam/if/pol
641        Takes a 'mask' as an optional parameter to specify which
642        channels should be excluded.
[1846]643
[135]644        Parameters:
[1846]645
[135]646            mask:    an optional mask specifying where the standard
647                     deviation should be determined.
648
[1846]649        Example::
650
[135]651            scan.set_unit('channel')
[1118]652            msk = scan.create_mask([100, 200], [500, 600])
[135]653            scan.stddev(mask=msk)
[1846]654
[135]655        """
[1118]656        return self.stats(stat='stddev', mask=mask);
[135]657
[1003]658
[1259]659    def get_column_names(self):
[1846]660        """\
[1003]661        Return a list of column names, which can be used for selection.
662        """
[1259]663        return list(Scantable.get_column_names(self))
[1003]664
[1730]665    def get_tsys(self, row=-1):
[1846]666        """\
[113]667        Return the System temperatures.
[1846]668
669        Parameters:
670
671            row:    the rowno to get the information for. (default all rows)
672
[113]673        Returns:
[1846]674
[876]675            a list of Tsys values for the current selection
[1846]676
[113]677        """
[1730]678        if row > -1:
679            return self._get_column(self._gettsys, row)
[876]680        return self._row_callback(self._gettsys, "Tsys")
[256]681
[1730]682
683    def get_weather(self, row=-1):
[1846]684        """\
685        Return the weather information.
686
687        Parameters:
688
689            row:    the rowno to get the information for. (default all rows)
690
691        Returns:
692
693            a dict or a list of dicts of values for the current selection
694
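        Example (illustrative)::

            w = scan.get_weather(0)
            print w['temperature'], w['windspeed']
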
695        """
696
[1730]697        values = self._get_column(self._get_weather, row)
698        if row > -1:
699            return {'temperature': values[0],
700                    'pressure': values[1], 'humidity' : values[2],
701                    'windspeed' : values[3], 'windaz' : values[4]
702                    }
703        else:
704            out = []
705            for r in values:
706
707                out.append({'temperature': r[0],
708                            'pressure': r[1], 'humidity' : r[2],
709                            'windspeed' : r[3], 'windaz' : r[4]
710                    })
711            return out
712
[876]713    def _row_callback(self, callback, label):
714        out = ""
[1118]715        outvec = []
[1590]716        sep = '-'*50
[876]717        for i in range(self.nrow()):
718            tm = self._gettime(i)
719            src = self._getsourcename(i)
[1590]720            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]721            out += 'Time[%s]:\n' % (tm)
[1590]722            if self.nbeam(-1) > 1:
723                out +=  ' Beam[%d] ' % (self.getbeam(i))
724            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
725            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]726            outvec.append(callback(i))
727            out += '= %3.3f\n' % (outvec[i])
[1590]728            out +=  sep+'\n'
[876]729        if rcParams['verbose']:
[1819]730            asaplog.push(sep)
731            asaplog.push(" %s" % (label))
732            asaplog.push(sep)
733            asaplog.push(out)
734            print_log()
[1175]735        return outvec
[256]736
[1070]737    def _get_column(self, callback, row=-1):
738        """
739        """
740        if row == -1:
741            return [callback(i) for i in range(self.nrow())]
742        else:
[1819]743            if  0 <= row < self.nrow():
[1070]744                return callback(row)
[256]745
[1070]746
[1348]747    def get_time(self, row=-1, asdatetime=False):
[1846]748        """\
[113]749        Get a list of time stamps for the observations.
[1348]750        Return a datetime object for each integration time stamp in the scantable.
[1846]751
[113]752        Parameters:
[1846]753
[1348]754            row:          row no of integration. Default -1 return all rows
[1855]755
[1348]756            asdatetime:   return values as datetime objects rather than strings
[1846]757
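        Example (illustrative)::

            # retrieve all time stamps as datetime objects
            times = scan.get_time(asdatetime=True)
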
[113]758        """
[1175]759        from time import strptime
760        from datetime import datetime
[1392]761        times = self._get_column(self._gettime, row)
[1348]762        if not asdatetime:
[1392]763            return times
[1175]764        format = "%Y/%m/%d/%H:%M:%S"
765        if isinstance(times, list):
766            return [datetime(*strptime(i, format)[:6]) for i in times]
767        else:
768            return datetime(*strptime(times, format)[:6])
[102]769
[1348]770
771    def get_inttime(self, row=-1):
[1846]772        """\
[1348]773        Get a list of integration times for the observations.
774        Return a time in seconds for each integration in the scantable.
[1846]775
[1348]776        Parameters:
[1846]777
[1348]778            row:    row no of integration. Default -1 return all rows.
[1846]779
[1348]780        """
[1573]781        return self._get_column(self._getinttime, row)
[1348]782
[1573]783
[714]784    def get_sourcename(self, row=-1):
[1846]785        """\
[794]786        Get a list of source names for the observations.
[714]787        Return a string for each integration in the scantable.
788        Parameters:
[1846]789
[1348]790            row:    row no of integration. Default -1 return all rows.
[1846]791
[714]792        """
[1070]793        return self._get_column(self._getsourcename, row)
[714]794
[794]795    def get_elevation(self, row=-1):
[1846]796        """\
[794]797        Get a list of elevations for the observations.
798        Return a float for each integration in the scantable.
[1846]799
[794]800        Parameters:
[1846]801
[1348]802            row:    row no of integration. Default -1 return all rows.
[1846]803
[794]804        """
[1070]805        return self._get_column(self._getelevation, row)
[794]806
807    def get_azimuth(self, row=-1):
[1846]808        """\
[794]809        Get a list of azimuths for the observations.
810        Return a float for each integration in the scantable.
[1846]811
[794]812        Parameters:
[1348]813            row:    row no of integration. Default -1 return all rows.
[1846]814
[794]815        """
[1070]816        return self._get_column(self._getazimuth, row)
[794]817
818    def get_parangle(self, row=-1):
[1846]819        """\
[794]820        Get a list of parallactic angles for the observations.
821        Return a float for each integration in the scantable.
[1846]822
[794]823        Parameters:
[1846]824
[1348]825            row:    row no of integration. Default -1 return all rows.
[1846]826
[794]827        """
[1070]828        return self._get_column(self._getparangle, row)
[794]829
[1070]830    def get_direction(self, row=-1):
831        """
832        Get a list of Positions on the sky (direction) for the observations.
[1594]833        Return a string for each integration in the scantable.
[1855]834
[1070]835        Parameters:
[1855]836
[1070]837            row:    row no of integration. Default -1 return all rows
[1855]838
[1070]839        """
840        return self._get_column(self._getdirection, row)
841
[1391]842    def get_directionval(self, row=-1):
[1846]843        """\
[1391]844        Get a list of Positions on the sky (direction) for the observations.
845        Return a float for each integration in the scantable.
[1846]846
[1391]847        Parameters:
[1846]848
[1391]849            row:    row no of integration. Default -1 return all rows
[1846]850
[1391]851        """
852        return self._get_column(self._getdirectionvec, row)
853
[1730]854    @print_log_dec
[102]855    def set_unit(self, unit='channel'):
[1846]856        """\
[102]857        Set the unit for all following operations on this scantable
[1846]858
[102]859        Parameters:
[1846]860
861            unit:    optional unit, default is 'channel'. Use one of '*Hz',
862                     'km/s', 'channel' or equivalent ''
863
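        Example (illustrative)::

            scan.set_unit('km/s')    # all following operations use velocity
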
[102]864        """
[484]865        varlist = vars()
[1118]866        if unit in ['', 'pixel', 'channel']:
[113]867            unit = ''
868        inf = list(self._getcoordinfo())
869        inf[0] = unit
870        self._setcoordinfo(inf)
[1118]871        self._add_history("set_unit", varlist)
[113]872
[1589]873    @print_log_dec
[484]874    def set_instrument(self, instr):
[1846]875        """\
[1348]876        Set the instrument for subsequent processing.
[1846]877
[358]878        Parameters:
[1846]879
[710]880            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]881                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]882
[358]883        """
884        self._setInstrument(instr)
[1118]885        self._add_history("set_instrument", vars())
[358]886
[1589]887    @print_log_dec
[1190]888    def set_feedtype(self, feedtype):
[1846]889        """\
[1190]890        Overwrite the feed type, which might not be set correctly.
[1846]891
[1190]892        Parameters:
[1846]893
[1190]894            feedtype:     'linear' or 'circular'
[1846]895
[1190]896        """
897        self._setfeedtype(feedtype)
898        self._add_history("set_feedtype", vars())
899
[1589]900    @print_log_dec
[276]901    def set_doppler(self, doppler='RADIO'):
[1846]902        """\
[276]903        Set the doppler for all following operations on this scantable.
[1846]904
[276]905        Parameters:
[1846]906
[276]907            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]908
[276]909        """
[484]910        varlist = vars()
[276]911        inf = list(self._getcoordinfo())
912        inf[2] = doppler
913        self._setcoordinfo(inf)
[1118]914        self._add_history("set_doppler", vars())
[710]915
[1589]916    @print_log_dec
[226]917    def set_freqframe(self, frame=None):
[1846]918        """\
[113]919        Set the frame type of the Spectral Axis.
[1846]920
[113]921        Parameters:
[1846]922
[591]923            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]924                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]925                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]926
927        Example::
928
[113]929            scan.set_freqframe('BARY')
[1846]930
[113]931        """
[1593]932        frame = frame or rcParams['scantable.freqframe']
[484]933        varlist = vars()
[1819]934        # "REST" is not implemented in casacore
935        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
936        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
937        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]938                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]939
[989]940        if frame in valid:
[113]941            inf = list(self._getcoordinfo())
942            inf[1] = frame
943            self._setcoordinfo(inf)
[1118]944            self._add_history("set_freqframe", varlist)
[102]945        else:
[1118]946            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
[718]947            if rcParams['verbose']:
[1819]948                #print msg
949                asaplog.push( msg )
950                print_log( 'ERROR' )
[718]951            else:
952                raise TypeError(msg)
[710]953
[989]954    def set_dirframe(self, frame=""):
[1846]955        """\
[989]956        Set the frame type of the Direction on the sky.
[1846]957
[989]958        Parameters:
[1846]959
[989]960            frame:   an optional frame type, default ''. Valid frames are:
961                     'J2000', 'B1950', 'GALACTIC'
[1846]962
963        Example:
964
[989]965            scan.set_dirframe('GALACTIC')
[1846]966
[989]967        """
968        varlist = vars()
969        try:
970            Scantable.set_dirframe(self, frame)
[1118]971        except RuntimeError, msg:
[989]972            if rcParams['verbose']:
[1819]973                #print msg
974                print_log()
975                asaplog.push( str(msg) )
976                print_log( 'ERROR' )
[989]977            else:
978                raise
[1118]979        self._add_history("set_dirframe", varlist)
[989]980
[113]981    def get_unit(self):
[1846]982        """\
[113]983        Get the default unit set in this scantable
[1846]984
[113]985        Returns:
[1846]986
[113]987            A unit string
[1846]988
[113]989        """
990        inf = self._getcoordinfo()
991        unit = inf[0]
992        if unit == '': unit = 'channel'
993        return unit
[102]994
[1856]995    @print_log_dec
[158]996    def get_abcissa(self, rowno=0):
[1846]997        """\
[158]998        Get the abcissa in the current coordinate setup for the currently
[113]999        selected Beam/IF/Pol
[1846]1000
[113]1001        Parameters:
[1846]1002
[226]1003            rowno:    an optional row number in the scantable. Default is the
1004                      first row, i.e. rowno=0
[1846]1005
[113]1006        Returns:
[1846]1007
[1348]1008            The abcissa values and the format string (as a dictionary)
[1846]1009
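        Example (illustrative)::

            abc, lbl = scan.get_abcissa(0)
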
[113]1010        """
[256]1011        abc = self._getabcissa(rowno)
[710]1012        lbl = self._getabcissalabel(rowno)
[158]1013        return abc, lbl
[113]1014
[1819]1015    def flag(self, mask=None, unflag=False):
[1846]1016        """\
[1001]1017        Flag the selected data using an optional channel mask.
[1846]1018
[1001]1019        Parameters:
[1846]1020
[1001]1021            mask:   an optional channel mask, created with create_mask. Default
1022                    (no mask) is all channels.
[1855]1023
[1819]1024            unflag:    if True, unflag the data
[1846]1025
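        Example (illustrative)::

            # flag the channel range [0, 100] (in the current unit)
            msk = scan.create_mask([0, 100])
            scan.flag(mask=msk)
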
[1001]1026        """
1027        varlist = vars()
[1593]1028        mask = mask or []
[1001]1029        try:
[1819]1030            self._flag(mask, unflag)
[1118]1031        except RuntimeError, msg:
[1001]1032            if rcParams['verbose']:
[1819]1033                #print msg
1034                print_log()
1035                asaplog.push( str(msg) )
1036                print_log( 'ERROR' )
[1001]1037                return
1038            else: raise
1039        self._add_history("flag", varlist)
1040
[1819]1041    def flag_row(self, rows=[], unflag=False):
[1846]1042        """\
[1819]1043        Flag the selected data in row-based manner.
[1846]1044
[1819]1045        Parameters:
[1846]1046
[1843]1047            rows:   list of row numbers to be flagged. Default is no row
1048                    (must be explicitly specified to execute row-based flagging).
[1855]1049
[1819]1050            unflag: if True, unflag the data.
[1846]1051
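        Example (illustrative)::

            scan.flag_row([2, 5])    # flag rows 2 and 5
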
[1819]1052        """
1053        varlist = vars()
1054        try:
1055            self._flag_row(rows, unflag)
1056        except RuntimeError, msg:
1057            if rcParams['verbose']:
1058                print_log()
1059                asaplog.push( str(msg) )
1060                print_log('ERROR')
1061                return
1062            else: raise
1063        self._add_history("flag_row", varlist)
1064
1065    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1066        """\
[1819]1067        Flag the selected data outside a specified range (channel-based)
[1846]1068
[1819]1069        Parameters:
[1846]1070
[1819]1071            uthres:      upper threshold.
[1855]1072
[1819]1073            dthres:      lower threshold
[1846]1074
[1819]1075            clipoutside: True for flagging data outside the range [dthres:uthres].
1076                         False for flagging data inside the range.
[1855]1077
[1846]1078            unflag:      if True, unflag the data.
1079
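        Example (illustrative)::

            # flag everything outside the range [-1.0, 5.0]
            scan.clip(uthres=5.0, dthres=-1.0, clipoutside=True)
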
[1819]1080        """
1081        varlist = vars()
1082        try:
1083            self._clip(uthres, dthres, clipoutside, unflag)
1084        except RuntimeError, msg:
1085            if rcParams['verbose']:
1086                print_log()
1087                asaplog.push(str(msg))
1088                print_log('ERROR')
1089                return
1090            else: raise
1091        self._add_history("clip", varlist)
1092
[1589]1093    @print_log_dec
[1584]1094    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1095        """\
[1192]1096        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1097        Flagged data in the scantable gets interpolated over the region.
[1192]1098        No taper is applied.
[1846]1099
[1192]1100        Parameters:
[1846]1101
[1579]1102            start:    the start frequency (really a period within the
1103                      bandwidth)  or period to remove
[1855]1104
[1579]1105            end:      the end frequency or period to remove
[1855]1106
[1584]1107            unit:     the frequency unit (default "MHz") or "" for
[1579]1108                      explicit lag channels
[1846]1109
1110        *Notes*:
1111
[1579]1112            It is recommended to flag edges of the band or strong
[1348]1113            signals beforehand.
[1846]1114
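        Example (illustrative; the frequency range is arbitrary)::

            # interpolate over a 5-7 MHz 'lag' feature
            cleaned = scan.lag_flag(start=5.0, end=7.0, unit="MHz", insitu=False)
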
[1192]1115        """
1116        if insitu is None: insitu = rcParams['insitu']
1117        self._math._setinsitu(insitu)
1118        varlist = vars()
[1579]1119        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1120        if not (unit == "" or base.has_key(unit)):
[1192]1121            raise ValueError("%s is not a valid unit." % unit)
1122        try:
[1579]1123            if unit == "":
1124                s = scantable(self._math._lag_flag(self, start, end, "lags"))
1125            else:
1126                s = scantable(self._math._lag_flag(self, start*base[unit],
1127                                                   end*base[unit], "frequency"))
[1192]1128        except RuntimeError, msg:
1129            if rcParams['verbose']:
[1819]1130                #print msg
1131                print_log()
1132                asaplog.push( str(msg) )
1133                print_log( 'ERROR' )
[1192]1134                return
1135            else: raise
1136        s._add_history("lag_flag", varlist)
1137        if insitu:
1138            self._assign(s)
1139        else:
1140            return s
[1001]1141
[1589]1142    @print_log_dec
[113]1143    def create_mask(self, *args, **kwargs):
[1846]1144        """\
[1118]1145        Compute and return a mask based on [min, max] windows.
[189]1146        The specified windows are to be INCLUDED, when the mask is
[113]1147        applied.
[1846]1148
[102]1149        Parameters:
[1846]1150
[1118]1151            [min, max], [min2, max2], ...
[1024]1152                Pairs of start/end points (inclusive) specifying the regions
[102]1153                to be masked
[1855]1154
[189]1155            invert:     optional argument. If specified as True,
1156                        return an inverted mask, i.e. the regions
1157                        specified are EXCLUDED
[1855]1158
[513]1159            row:        create the mask using the specified row for
1160                        unit conversions, default is row=0
1161                        only necessary if frequency varies over rows.
[1846]1162
1163        Examples::
1164
[113]1165            scan.set_unit('channel')
[1846]1166            # a)
[1118]1167            msk = scan.create_mask([400, 500], [800, 900])
[189]1168            # masks everything outside 400 and 500
[113]1169            # and 800 and 900 in the unit 'channel'
1170
[1846]1171            # b)
[1118]1172            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1173            # masks the regions between 400 and 500
[113]1174            # and 800 and 900 in the unit 'channel'
[1846]1175
1176            # c)
1177            #mask only channel 400
[1554]1178            msk =  scan.create_mask([400])
[1846]1179
[102]1180        """
[1554]1181        row = kwargs.get("row", 0)
[513]1182        data = self._getabcissa(row)
[113]1183        u = self._getcoordinfo()[0]
[718]1184        if rcParams['verbose']:
[113]1185            if u == "": u = "channel"
[718]1186            msg = "The current mask window unit is %s" % u
[1118]1187            i = self._check_ifs()
1188            if not i:
[876]1189                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[718]1190            asaplog.push(msg)
[102]1191        n = self.nchan()
[1295]1192        msk = _n_bools(n, False)
[710]1193        # test if args is a 'list' or a 'normal *args - UGLY!!!
1194
[1118]1195        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1196             and args or args[0]
[710]1197        for window in ws:
[1554]1198            if len(window) == 1:
1199                window = [window[0], window[0]]
1200            if len(window) == 0 or  len(window) > 2:
1201                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1202            if window[0] > window[1]:
1203                tmp = window[0]
1204                window[0] = window[1]
1205                window[1] = tmp
[102]1206            for i in range(n):
[1024]1207                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1208                    msk[i] = True
[113]1209        if kwargs.has_key('invert'):
1210            if kwargs.get('invert'):
[1295]1211                msk = mask_not(msk)
[102]1212        return msk
[710]1213
[1819]1214    def get_masklist(self, mask=None, row=0):
[1846]1215        """\
[1819]1216        Compute and return a list of mask windows, [min, max].
[1846]1217
[1819]1218        Parameters:
[1846]1219
[1819]1220            mask:       channel mask, created with create_mask.
[1855]1221
[1819]1222            row:        calculate the masklist using the specified row
1223                        for unit conversions, default is row=0
1224                        only necessary if frequency varies over rows.
[1846]1225
[1819]1226        Returns:
[1846]1227
[1819]1228            [min, max], [min2, max2], ...
1229                Pairs of start/end points (inclusive)specifying
1230                the masked regions
[1846]1231
[1819]1232        """
1233        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1234            raise TypeError("The mask should be list or tuple.")
1235        if len(mask) < 2:
1236            raise TypeError("The mask should contain more than 1 element")
1237        if self.nchan() != len(mask):
1238            msg = "Number of channels in scantable != number of mask elements"
1239            raise TypeError(msg)
1240        data = self._getabcissa(row)
1241        u = self._getcoordinfo()[0]
1242        if rcParams['verbose']:
1243            if u == "": u = "channel"
1244            msg = "The current mask window unit is %s" % u
1245            i = self._check_ifs()
1246            if not i:
1247                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1248            asaplog.push(msg)
1249        masklist=[]
1250        ist, ien = None, None
1251        ist, ien=self.get_mask_indices(mask)
1252        if ist is not None and ien is not None:
1253            for i in xrange(len(ist)):
1254                range=[data[ist[i]],data[ien[i]]]
1255                range.sort()
1256                masklist.append([range[0],range[1]])
1257        return masklist
1258
1259    def get_mask_indices(self, mask=None):
[1846]1260        """\
[1819]1261        Compute and return lists of mask start indices and mask end indices.
[1855]1262
1263        Parameters:
1264
[1819]1265            mask:       channel mask, created with create_mask.
[1846]1266
[1819]1267        Returns:
[1846]1268
[1819]1269            List of mask start indices and that of mask end indices,
1270            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1271
[1819]1272        """
1273        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1274            raise TypeError("The mask should be list or tuple.")
1275        if len(mask) < 2:
1276            raise TypeError("The mask should contain more than 1 element")
1277        istart=[]
1278        iend=[]
1279        if mask[0]: istart.append(0)
1280        for i in range(len(mask)-1):
1281            if not mask[i] and mask[i+1]:
1282                istart.append(i+1)
1283            elif mask[i] and not mask[i+1]:
1284                iend.append(i)
1285        if mask[len(mask)-1]: iend.append(len(mask)-1)
1286        if len(istart) != len(iend):
1287            raise RuntimeError("Numbers of mask start != mask end.")
1288        for i in range(len(istart)):
1289            if istart[i] > iend[i]:
1290                raise RuntimeError("Mask start index > mask end index")
1291                break
1292        return istart,iend
1293
1294#    def get_restfreqs(self):
1295#        """
1296#        Get the restfrequency(s) stored in this scantable.
1297#        The return value(s) are always of unit 'Hz'
1298#        Parameters:
1299#            none
1300#        Returns:
1301#            a list of doubles
1302#        """
1303#        return list(self._getrestfreqs())
1304
1305    def get_restfreqs(self, ids=None):
[1846]1306        """\
[256]1307        Get the restfrequency(s) stored in this scantable.
1308        The return value(s) are always of unit 'Hz'
[1846]1309
[256]1310        Parameters:
[1846]1311
[1819]1312            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1313                 be retrieved
[1846]1314
[256]1315        Returns:
[1846]1316
[1819]1317            dictionary containing ids and a list of doubles for each id
[1846]1318
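        Example (illustrative)::

            rfreqs = scan.get_restfreqs()    # all MOLECULE_IDs
            print rfreqs
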
[256]1319        """
[1819]1320        if ids is None:
1321            rfreqs={}
1322            idlist = self.getmolnos()
1323            for i in idlist:
1324                rfreqs[i]=list(self._getrestfreqs(i))
1325            return rfreqs
1326        else:
1327            if type(ids)==list or type(ids)==tuple:
1328                rfreqs={}
1329                for i in ids:
1330                    rfreqs[i]=list(self._getrestfreqs(i))
1331                return rfreqs
1332            else:
1333                return list(self._getrestfreqs(ids))
1334            #return list(self._getrestfreqs(ids))
[102]1335
[931]1336    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1337        """\
[931]1338        Set or replace the restfrequency(s) specified.
1339        If the 'freqs' argument holds a scalar,
1340        then that rest frequency will be applied to all the selected
1341        data.  If the 'freqs' argument holds
1342        a vector, then it MUST be of equal or smaller length than
1343        the number of IFs (and the available restfrequencies will be
1344        replaced by this vector).  In this case, *all* data have
1345        the restfrequency set per IF according
1346        to the corresponding value you give in the 'freqs' vector.
[1118]1347        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1348        IF 1 gets restfreq 2e9.
[1846]1349
[1395]1350        You can also specify the frequencies via a linecatalog.
[1153]1351
[931]1352        Parameters:
[1846]1353
[931]1354            freqs:   list of rest frequency values or string identifiers
[1855]1355
[931]1356            unit:    unit for rest frequency (default 'Hz')
[402]1357
[1846]1358
1359        Example::
1360
[1819]1361            # set the given restfrequency for the all currently selected IFs
[931]1362            scan.set_restfreqs(freqs=1.4e9)
[1845]1363            # set restfrequencies for the n IFs  (n > 1) in the order of the
1364            # list, i.e
1365            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1366            # len(list_of_restfreqs) == nIF
1367            # for nIF == 1 the following will set multiple restfrequency for
1368            # that IF
[1819]1369            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1370            # set multiple restfrequencies per IF. as a list of lists where
1371            # the outer list has nIF elements, the inner s arbitrary
1372            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1373
[1846]1374       *Note*:
[1845]1375
[931]1376            To do more sophisticated restfrequency setting, e.g. on a
1377            source and IF basis, use scantable.set_selection() before using
[1846]1378            this function::
[931]1379
[1846]1380                # provided your scantable is called scan
1381                selection = selector()
1382                selection.set_name("ORION*")
1383                selection.set_ifs([1])
1384                scan.set_selection(selection)
1385                scan.set_restfreqs(freqs=86.6e9)
1386
[931]1387        """
1388        varlist = vars()
[1157]1389        from asap import linecatalog
1390        # simple  value
[1118]1391        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1392            self._setrestfreqs([freqs], [""], unit)
[1157]1393        # list of values
[1118]1394        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1395            # list values are scalars
[1118]1396            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1397                if len(freqs) == 1:
1398                    self._setrestfreqs(freqs, [""], unit)
1399                else:
1400                    # allow the 'old' mode of setting multiple IFs
1401                    sel = selector()
1402                    savesel = self._getselection()
1403                    iflist = self.getifnos()
1404                    if len(freqs)>len(iflist):
1405                        raise ValueError("number of elements in list of list "
1406                                         "exceeds the current IF selections")
1407                    iflist = self.getifnos()
1408                    for i, fval in enumerate(freqs):
1409                        sel.set_ifs(iflist[i])
1410                        self._setselection(sel)
1411                        self._setrestfreqs([fval], [""], unit)
1412                    self._setselection(savesel)
1413
1414            # list values are dict, {'value'=, 'name'=)
[1157]1415            elif isinstance(freqs[-1], dict):
[1845]1416                values = []
1417                names = []
1418                for d in freqs:
1419                    values.append(d["value"])
1420                    names.append(d["name"])
1421                self._setrestfreqs(values, names, unit)
[1819]1422            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1423                sel = selector()
1424                savesel = self._getselection()
[1322]1425                iflist = self.getifnos()
[1819]1426                if len(freqs)>len(iflist):
[1845]1427                    raise ValueError("number of elements in list of list exceeds"
1428                                     " the current IF selections")
1429                for i, fval in enumerate(freqs):
[1322]1430                    sel.set_ifs(iflist[i])
[1259]1431                    self._setselection(sel)
[1845]1432                    self._setrestfreqs(fval, [""], unit)
[1157]1433                self._setselection(savesel)
1434        # freqs are to be taken from a linecatalog
[1153]1435        elif isinstance(freqs, linecatalog):
1436            sel = selector()
1437            savesel = self._getselection()
            # the IF numbers present in the data
            iflist = self.getifnos()
1438            for i in xrange(freqs.nrow()):
[1322]1439                sel.set_ifs(iflist[i])
[1153]1440                self._setselection(sel)
[1845]1441                self._setrestfreqs([freqs.get_frequency(i)],
1442                                   [freqs.get_name(i)], "MHz")
[1153]1443                # ensure that we are not iterating past nIF
1444                if i == self.nif()-1: break
1445            self._setselection(savesel)
[931]1446        else:
1447            return
1448        self._add_history("set_restfreqs", varlist)
1449
[1360]1450    def shift_refpix(self, delta):
[1846]1451        """\
[1589]1452        Shift the reference pixel of the spectral coordinate by an
1453        integer amount.
[1846]1454
[1589]1455        Parameters:
[1846]1456
[1589]1457            delta:   the amount to shift by
[1846]1458
1459        *Note*:
1460
[1589]1461            Be careful using this with broadband data.
[1846]1462
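        Example (a minimal sketch, assuming a scantable called scan)::

            # shift the reference pixel by two channels
            scan.shift_refpix(2)
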
[1360]1463        """
[1731]1464        Scantable.shift_refpix(self, delta)
[931]1465
[1259]1466    def history(self, filename=None):
[1846]1467        """\
[1259]1468        Print the history. Optionally to a file.
[1846]1469
[1348]1470        Parameters:
[1846]1471
[1348]1472            filename:    The name of the file to save the history to.
[1846]1473
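        Example (a sketch assuming a scantable called scan; the output file
        name is hypothetical)::

            # page the history on screen
            scan.history()
            # or write it to a text file
            scan.history("scan_history.txt")
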
[1259]1474        """
[484]1475        hist = list(self._gethistory())
[794]1476        out = "-"*80
[484]1477        for h in hist:
[489]1478            if h.startswith("---"):
[1857]1479                out = "\n".join([out, h])
[489]1480            else:
1481                items = h.split("##")
1482                date = items[0]
1483                func = items[1]
1484                items = items[2:]
[794]1485                out += "\n"+date+"\n"
1486                out += "Function: %s\n  Parameters:" % (func)
[489]1487                for i in items:
1488                    s = i.split("=")
[1118]1489                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1490                out = "\n".join([out, "-"*80])
[1259]1491        if filename is not None:
1492            if filename == "":
1493                filename = 'scantable_history.txt'
1494            import os
1495            filename = os.path.expandvars(os.path.expanduser(filename))
1496            if not os.path.isdir(filename):
1497                data = open(filename, 'w')
1498                data.write(out)
1499                data.close()
1500            else:
1501                msg = "Illegal file name '%s'." % (filename)
1502                if rcParams['verbose']:
[1819]1503                    #print msg
1504                    asaplog.push( msg )
1505                    print_log( 'ERROR' )
[1259]1506                else:
1507                    raise IOError(msg)
1508        if rcParams['verbose']:
1509            try:
1510                from IPython.genutils import page as pager
1511            except ImportError:
1512                from pydoc import pager
1513            pager(out)
1514        else:
1515            return out
[484]1516        return
[513]1517    #
1518    # Maths business
1519    #
[1589]1520    @print_log_dec
[931]1521    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1522        """\
[1070]1523        Return the (time) weighted average of a scan.
[1846]1524
1525        *Note*:
1526
[1070]1527            in channels only - align if necessary
[1846]1528
[513]1529        Parameters:
[1846]1530
[513]1531            mask:     an optional mask (only used for 'var' and 'tsys'
1532                      weighting)
[1855]1533
[558]1534            scanav:   True averages each scan separately
1535                      False (default) averages all scans together.
[1855]1536
[1099]1537            weight:   Weighting scheme.
1538                      'none'     (mean no weight)
1539                      'var'      (1/var(spec) weighted)
1540                      'tsys'     (1/Tsys**2 weighted)
1541                      'tint'     (integration time weighted)
1542                      'tintsys'  (Tint/Tsys**2)
1543                      'median'   (median averaging)
[535]1544                      The default is 'tint'
[1855]1545
[931]1546            align:    align the spectra in velocity before averaging. It takes
1547                      the time of the first spectrum as reference time.
[1846]1548
1549        Example::
1550
[513]1551            # time average the scantable without using a mask
[710]1552            newscan = scan.average_time()
[1846]1553
[513]1554        """
1555        varlist = vars()
[1593]1556        weight = weight or 'TINT'
1557        mask = mask or ()
1558        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1559        scan = (self, )
[989]1560        try:
[1118]1561            if align:
1562                scan = (self.freq_align(insitu=False), )
1563            s = None
1564            if weight.upper() == 'MEDIAN':
1565                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1566                                                         scanav))
1567            else:
1568                s = scantable(self._math._average(scan, mask, weight.upper(),
1569                              scanav))
1570        except RuntimeError, msg:
[989]1571            if rcParams['verbose']:
[1819]1572                #print msg
1573                print_log()
1574                asaplog.push( str(msg) )
1575                print_log( 'ERROR' )
[989]1576                return
1577            else: raise
[1099]1578        s._add_history("average_time", varlist)
[513]1579        return s
[710]1580
[1589]1581    @print_log_dec
[876]1582    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1583        """\
[513]1584        Return a scan where all spectra are converted to either
1585        Jansky or Kelvin depending upon the flux units of the scan table.
1586        By default the function tries to look the values up internally.
1587        If it can't find them (or if you want to over-ride), you must
1588        specify EITHER jyperk OR eta (and D which it will try to look up
1589        also if you don't set it). jyperk takes precedence if you set both.
[1846]1590
[513]1591        Parameters:
[1846]1592
[513]1593            jyperk:      the Jy / K conversion factor
[1855]1594
[513]1595            eta:         the aperture efficiency
[1855]1596
[513]1597            d:           the geometric diameter (metres)
[1855]1598
[513]1599            insitu:      if False a new scantable is returned.
1600                         Otherwise, the scaling is done in-situ
1601                         The default is taken from .asaprc (False)
[1846]1602
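        Example (a sketch assuming a scantable called scan; the efficiency
        and diameter values are illustrative only)::

            # rely on internally known values where possible
            converted = scan.convert_flux(insitu=False)
            # or supply the aperture efficiency and dish diameter explicitly
            converted = scan.convert_flux(eta=0.58, d=22.0, insitu=False)
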
[513]1603        """
1604        if insitu is None: insitu = rcParams['insitu']
[876]1605        self._math._setinsitu(insitu)
[513]1606        varlist = vars()
[1593]1607        jyperk = jyperk or -1.0
1608        d = d or -1.0
1609        eta = eta or -1.0
[876]1610        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1611        s._add_history("convert_flux", varlist)
1612        if insitu: self._assign(s)
1613        else: return s
[513]1614
[1589]1615    @print_log_dec
[876]1616    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1617        """\
[513]1618        Return a scan after applying a gain-elevation correction.
1619        The correction can be made via either a polynomial or a
1620        table-based interpolation (and extrapolation if necessary).
1621        You specify polynomial coefficients, an ascii table or neither.
1622        If you specify neither, then a polynomial correction will be made
1623        with built in coefficients known for certain telescopes (an error
1624        will occur if the instrument is not known).
1625        The data and Tsys are *divided* by the scaling factors.
[1846]1626
[513]1627        Parameters:
[1846]1628
[513]1629            poly:        Polynomial coefficients (default None) to compute a
1630                         gain-elevation correction as a function of
1631                         elevation (in degrees).
[1855]1632
[513]1633            filename:    The name of an ascii file holding correction factors.
1634                         The first row of the ascii file must give the column
1635                         names and these MUST include columns
1636                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1637                         by this) somewhere.
1638                         The second row must give the data type of the
1639                         column. Use 'R' for Real and 'I' for Integer.
1640                         An example file would be
1641                         (actual factors are arbitrary):
1642
1643                         TIME ELEVATION FACTOR
1644                         R R R
1645                         0.1 0 0.8
1646                         0.2 20 0.85
1647                         0.3 40 0.9
1648                         0.4 60 0.85
1649                         0.5 80 0.8
1650                         0.6 90 0.75
[1855]1651
[513]1652            method:      Interpolation method when correcting from a table.
1653                         Values are  "nearest", "linear" (default), "cubic"
1654                         and "spline"
[1855]1655
[513]1656            insitu:      if False a new scantable is returned.
1657                         Otherwise, the scaling is done in-situ
1658                         The default is taken from .asaprc (False)
[1846]1659
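        Example (a sketch assuming a scantable called scan and a hypothetical
        correction table 'gainel.txt' in the format described above)::

            corrected = scan.gain_el(filename="gainel.txt", method="linear",
                                     insitu=False)
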
[513]1660        """
1661
1662        if insitu is None: insitu = rcParams['insitu']
[876]1663        self._math._setinsitu(insitu)
[513]1664        varlist = vars()
[1593]1665        poly = poly or ()
[513]1666        from os.path import expandvars
1667        filename = expandvars(filename)
[876]1668        s = scantable(self._math._gainel(self, poly, filename, method))
1669        s._add_history("gain_el", varlist)
[1593]1670        if insitu:
1671            self._assign(s)
1672        else:
1673            return s
[710]1674
[1589]1675    @print_log_dec
[931]1676    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1677        """\
[513]1678        Return a scan where all rows have been aligned in frequency/velocity.
1679        The alignment frequency frame (e.g. LSRK) is that set by function
1680        set_freqframe.
[1846]1681
[513]1682        Parameters:
[1855]1683
[513]1684            reftime:     reference time to align at. By default, the time of
1685                         the first row of data is used.
[1855]1686
[513]1687            method:      Interpolation method for regridding the spectra.
1688                         Choose from "nearest", "linear", "cubic" (default)
1689                         and "spline"
[1855]1690
[513]1691            insitu:      if False a new scantable is returned.
1692                         Otherwise, the scaling is done in-situ
1693                         The default is taken from .asaprc (False)
[1846]1694
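        Example (a minimal sketch, assuming a scantable called scan)::

            # align all rows to the time of the first row, using the default
            # cubic interpolation
            aligned = scan.freq_align(insitu=False)
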
[513]1695        """
[931]1696        if insitu is None: insitu = rcParams["insitu"]
[876]1697        self._math._setinsitu(insitu)
[513]1698        varlist = vars()
[1593]1699        reftime = reftime or ""
[931]1700        s = scantable(self._math._freq_align(self, reftime, method))
[876]1701        s._add_history("freq_align", varlist)
1702        if insitu: self._assign(s)
1703        else: return s
[513]1704
[1589]1705    @print_log_dec
[1725]1706    def opacity(self, tau=None, insitu=None):
[1846]1707        """\
[513]1708        Apply an opacity correction. The data
1709        and Tsys are multiplied by the correction factor.
[1846]1710
[513]1711        Parameters:
[1855]1712
[1689]1713            tau:         (list of) opacity from which the correction factor is
[513]1714                         exp(tau*ZD)
[1689]1715                         where ZD is the zenith-distance.
1716                         If a list is provided, it has to be of length nIF,
1717                         nIF*nPol or 1 and in order of IF/POL, e.g.
1718                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1719                         if tau is `None` the opacities are determined from a
1720                         model.
[1855]1721
[513]1722            insitu:      if False a new scantable is returned.
1723                         Otherwise, the scaling is done in-situ
1724                         The default is taken from .asaprc (False)
[1846]1725
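        Example (a sketch assuming a scantable called scan; the opacity value
        is illustrative only)::

            # correct all IFs/polarisations with a single zenith opacity
            corrected = scan.opacity(tau=[0.05], insitu=False)
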
[513]1726        """
1727        if insitu is None: insitu = rcParams['insitu']
[876]1728        self._math._setinsitu(insitu)
[513]1729        varlist = vars()
[1689]1730        if not hasattr(tau, "__len__"):
1731            tau = [tau]
[876]1732        s = scantable(self._math._opacity(self, tau))
1733        s._add_history("opacity", varlist)
1734        if insitu: self._assign(s)
1735        else: return s
[513]1736
[1589]1737    @print_log_dec
[513]1738    def bin(self, width=5, insitu=None):
[1846]1739        """\
[513]1740        Return a scan where all spectra have been binned up.
[1846]1741
[1348]1742        Parameters:
[1846]1743
[513]1744            width:       The bin width (default=5) in pixels
[1855]1745
[513]1746            insitu:      if False a new scantable is returned.
1747                         Otherwise, the scaling is done in-situ
1748                         The default is taken from .asaprc (False)
[1846]1749
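        Example (a minimal sketch, assuming a scantable called scan)::

            # bin up the spectra by 10 channels
            binned = scan.bin(width=10, insitu=False)
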
[513]1750        """
1751        if insitu is None: insitu = rcParams['insitu']
[876]1752        self._math._setinsitu(insitu)
[513]1753        varlist = vars()
[876]1754        s = scantable(self._math._bin(self, width))
[1118]1755        s._add_history("bin", varlist)
[1589]1756        if insitu:
1757            self._assign(s)
1758        else:
1759            return s
[513]1760
[1589]1761    @print_log_dec
[513]1762    def resample(self, width=5, method='cubic', insitu=None):
[1846]1763        """\
[1348]1764        Return a scan where all spectra have been resampled to the given bin width by interpolation.
[1573]1765
[1348]1766        Parameters:
[1846]1767
[513]1768            width:       The bin width (default=5) in pixels
[1855]1769
[513]1770            method:      Interpolation method used when resampling.
1771                         Values are  "nearest", "linear", "cubic" (default)
1772                         and "spline"
[1855]1773
[513]1774            insitu:      if False a new scantable is returned.
1775                         Otherwise, the scaling is done in-situ
1776                         The default is taken from .asaprc (False)
[1846]1777
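        Example (a minimal sketch, assuming a scantable called scan)::

            # resample onto 5 pixel wide bins using spline interpolation
            resampled = scan.resample(width=5, method="spline", insitu=False)
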
[513]1778        """
1779        if insitu is None: insitu = rcParams['insitu']
[876]1780        self._math._setinsitu(insitu)
[513]1781        varlist = vars()
[876]1782        s = scantable(self._math._resample(self, method, width))
[1118]1783        s._add_history("resample", varlist)
[876]1784        if insitu: self._assign(s)
1785        else: return s
[513]1786
[1589]1787    @print_log_dec
[946]1788    def average_pol(self, mask=None, weight='none'):
[1846]1789        """\
[946]1790        Average the Polarisations together.
[1846]1791
[946]1792        Parameters:
[1846]1793
[946]1794            mask:        An optional mask defining the region, where the
1795                         averaging will be applied. The output will have all
1796                         specified points masked.
[1855]1797
[946]1798            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1799                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1800
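        Example (a minimal sketch, assuming a scantable called scan)::

            # unweighted average of the polarisations
            averaged = scan.average_pol(weight='none')
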
[946]1801        """
1802        varlist = vars()
[1593]1803        mask = mask or ()
[1010]1804        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1805        s._add_history("average_pol", varlist)
[992]1806        return s
[513]1807
[1589]1808    @print_log_dec
[1145]1809    def average_beam(self, mask=None, weight='none'):
[1846]1810        """\
[1145]1811        Average the Beams together.
[1846]1812
[1145]1813        Parameters:
1814            mask:        An optional mask defining the region, where the
1815                         averaging will be applied. The output will have all
1816                         specified points masked.
[1855]1817
[1145]1818            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1819                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1820
[1145]1821        """
1822        varlist = vars()
[1593]1823        mask = mask or ()
[1145]1824        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1825        s._add_history("average_beam", varlist)
1826        return s
1827
[1586]1828    def parallactify(self, pflag):
[1846]1829        """\
[1843]1830        Set a flag to indicate whether this data should be treated as having
[1617]1831        been 'parallactified' (total phase == 0.0)
[1846]1832
[1617]1833        Parameters:
[1855]1834
[1843]1835            pflag:  Bool indicating whether to turn this on (True) or
[1617]1836                    off (False)
[1846]1837
[1617]1838        """
[1586]1839        varlist = vars()
1840        self._parallactify(pflag)
1841        self._add_history("parallactify", varlist)
1842
[1589]1843    @print_log_dec
[992]1844    def convert_pol(self, poltype=None):
[1846]1845        """\
[992]1846        Convert the data to a different polarisation type.
[1565]1847        Note that you will need cross-polarisation terms for most conversions.
[1846]1848
[992]1849        Parameters:
[1855]1850
[992]1851            poltype:    The new polarisation type. Valid types are:
[1565]1852                        "linear", "circular", "stokes" and "linpol"
[1846]1853
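        Example (a sketch assuming a scantable called scan that contains
        linears with cross-polarisation terms)::

            stokes = scan.convert_pol("stokes")
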
[992]1854        """
1855        varlist = vars()
1856        try:
1857            s = scantable(self._math._convertpol(self, poltype))
[1118]1858        except RuntimeError, msg:
[992]1859            if rcParams['verbose']:
[1819]1860                #print msg
1861                print_log()
1862                asaplog.push( str(msg) )
1863                print_log( 'ERROR' )
[1118]1864                return
[992]1865            else:
1866                raise
[1118]1867        s._add_history("convert_pol", varlist)
[992]1868        return s
1869
[1589]1870    @print_log_dec
[1819]1871    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]1872        """\
[513]1873        Smooth the spectrum by the specified kernel (conserving flux).
[1846]1874
[513]1875        Parameters:
[1846]1876
[513]1877            kernel:     The type of smoothing kernel. Select from
[1574]1878                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1879                        or 'poly'
[1855]1880
[513]1881            width:      The width of the kernel in pixels. For hanning this is
1882                        ignored, otherwise it defaults to 5 pixels.
1883                        For 'gaussian' it is the Full Width Half
1884                        Maximum. For 'boxcar' it is the full width.
[1574]1885                        For 'rmedian' and 'poly' it is the half width.
[1855]1886
[1574]1887            order:      Optional parameter for 'poly' kernel (default is 2), to
1888                        specify the order of the polynomial. Ignored by all other
1889                        kernels.
[1855]1890
[1819]1891            plot:       plot the original and the smoothed spectra.
1892                        In this each individual fit has to be approved, by
1893                        typing 'y' or 'n'
[1855]1894
[513]1895            insitu:     if False a new scantable is returned.
1896                        Otherwise, the scaling is done in-situ
1897                        The default is taken from .asaprc (False)
[1846]1898
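        Example (a minimal sketch, assuming a scantable called scan)::

            # smooth with a 10 channel wide boxcar kernel
            smoothed = scan.smooth(kernel="boxcar", width=10, insitu=False)
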
[513]1899        """
1900        if insitu is None: insitu = rcParams['insitu']
[876]1901        self._math._setinsitu(insitu)
[513]1902        varlist = vars()
[1819]1903
1904        if plot: orgscan = self.copy()
1905
[1574]1906        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1907        s._add_history("smooth", varlist)
[1819]1908
1909        if plot:
1910            if rcParams['plotter.gui']:
1911                from asap.asaplotgui import asaplotgui as asaplot
1912            else:
1913                from asap.asaplot import asaplot
1914            self._p=asaplot()
1915            self._p.set_panels()
1916            ylab=s._get_ordinate_label()
1917            #self._p.palette(0,["#777777","red"])
1918            for r in xrange(s.nrow()):
1919                xsm=s._getabcissa(r)
1920                ysm=s._getspectrum(r)
1921                xorg=orgscan._getabcissa(r)
1922                yorg=orgscan._getspectrum(r)
1923                self._p.clear()
1924                self._p.hold()
1925                self._p.set_axes('ylabel',ylab)
1926                self._p.set_axes('xlabel',s._getabcissalabel(r))
1927                self._p.set_axes('title',s._getsourcename(r))
1928                self._p.set_line(label='Original',color="#777777")
1929                self._p.plot(xorg,yorg)
1930                self._p.set_line(label='Smoothed',color="red")
1931                self._p.plot(xsm,ysm)
1932                ### Ugly part for legend
1933                for i in [0,1]:
1934                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1935                self._p.release()
1936                ### Ugly part for legend
1937                self._p.subplots[0]['lines']=[]
1938                res = raw_input("Accept smoothing ([y]/n): ")
1939                if res.upper() == 'N':
1940                    s._setspectrum(yorg, r)
1941            self._p.unmap()
1942            self._p = None
1943            del orgscan
1944
[876]1945        if insitu: self._assign(s)
1946        else: return s
[513]1947
[1589]1948    @print_log_dec
[1617]1949    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1950                      insitu=None):
[1846]1951        """\
[513]1952        Return a scan which has been baselined (all rows) by a polynomial.
[1846]1953
[513]1954        Parameters:
[1846]1955
[794]1956            mask:       an optional mask
[1855]1957
[794]1958            order:      the order of the polynomial (default is 0)
[1855]1959
[1061]1960            plot:       plot the fit and the residual. In this each
1961                        indivual fit has to be approved, by typing 'y'
1962                        or 'n'
[1855]1963
[1391]1964            uselin:     use linear polynomial fit
[1855]1965
[794]1966            insitu:     if False a new scantable is returned.
1967                        Otherwise, the scaling is done in-situ
1968                        The default is taken from .asaprc (False)
[1846]1969
1970        Example::
1971
[513]1972            # return a scan baselined by a third order polynomial,
1973            # not using a mask
1974            bscan = scan.poly_baseline(order=3)
[1846]1975
[579]1976        """
[513]1977        if insitu is None: insitu = rcParams['insitu']
[1819]1978        if not insitu:
1979            workscan = self.copy()
1980        else:
1981            workscan = self
[513]1982        varlist = vars()
1983        if mask is None:
[1295]1984            mask = [True for i in xrange(self.nchan(-1))]
[1819]1985
[513]1986        from asap.asapfitter import fitter
[1217]1987        try:
1988            f = fitter()
[1391]1989            if uselin:
1990                f.set_function(lpoly=order)
1991            else:
1992                f.set_function(poly=order)
[1819]1993
1994            rows = range(workscan.nrow())
1995            if len(rows) > 0:
1996                self.blpars = []
1997
1998            for r in rows:
1999                # take into account flagtra info (CAS-1434)
2000                flagtra = workscan._getmask(r)
2001                actualmask = mask[:]
2002                if len(actualmask) == 0:
2003                    actualmask = list(flagtra[:])
2004                else:
2005                    if len(actualmask) != len(flagtra):
2006                        raise RuntimeError, "Mask and flagtra have different length"
2007                    else:
2008                        for i in range(0, len(actualmask)):
2009                            actualmask[i] = actualmask[i] and flagtra[i]
2010                f.set_scan(workscan, actualmask)
2011                f.x = workscan._getabcissa(r)
2012                f.y = workscan._getspectrum(r)
2013                f.data = None
2014                f.fit()
2015                if plot:
2016                    f.plot(residual=True)
2017                    x = raw_input("Accept fit ( [y]/n ): ")
2018                    if x.upper() == 'N':
2019                        self.blpars.append(None)
2020                        continue
2021                workscan._setspectrum(f.fitter.getresidual(), r)
2022                self.blpars.append(f.get_parameters())
2023
2024            if plot:
2025                f._p.unmap()
2026                f._p = None
2027            workscan._add_history("poly_baseline", varlist)
[1856]2028            if insitu:
2029                self._assign(workscan)
2030            else:
2031                return workscan
[1217]2032        except RuntimeError:
2033            msg = "The fit failed, possibly because it didn't converge."
2034            if rcParams['verbose']:
[1819]2035                #print msg
2036                print_log()
2037                asaplog.push( str(msg) )
2038                print_log( 'ERROR' )
[1217]2039                return
2040            else:
2041                raise RuntimeError(msg)
[513]2042
[1819]2043
[1118]2044    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]2045                           threshold=3, chan_avg_limit=1, plot=False,
2046                           insitu=None):
[1846]2047        """\
[880]2048        Return a scan which has been baselined (all rows) by a polynomial.
2049        Spectral lines are detected first using linefinder and masked out
2050        to avoid them affecting the baseline solution.
2051
2052        Parameters:
[1846]2053
[880]2054            mask:       an optional mask retrieved from scantable
[1846]2055
2056            edge:       an optional number of channels to drop at the edge of
2057                        the spectrum. If only one value is
[880]2058                        specified, the same number will be dropped from
2059                        both sides of the spectrum. Default is to keep
[907]2060                        all channels. Nested tuples represent individual
[976]2061                        edge selection for different IFs (the number of spectral
2062                        channels can differ between IFs)
[1846]2063
[880]2064            order:      the order of the polynomial (default is 0)
[1846]2065
[880]2066            threshold:  the threshold used by line finder. It is better to
2067                        keep it large as only strong lines affect the
2068                        baseline solution.
[1846]2069
[1280]2070            chan_avg_limit:
2071                        a maximum number of consecutive spectral channels to
2072                        average during the search of weak and broad lines.
2073                        The default is no averaging (and no search for weak
2074                        lines). If such lines can affect the fitted baseline
2075                        (e.g. a high order polynomial is fitted), increase this
2076                        parameter (usually values up to 8 are reasonable). Most
2077                        users of this method should find the default value
2078                        sufficient.
[1846]2079
[1061]2080            plot:       plot the fit and the residual. In this each
2081                        individual fit has to be approved, by typing 'y'
2082                        or 'n'
[1846]2083
[880]2084            insitu:     if False a new scantable is returned.
2085                        Otherwise, the scaling is done in-situ
2086                        The default is taken from .asaprc (False)
2087
[1846]2088
2089        Example::
2090
2091            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2092
[880]2093        """
2094        if insitu is None: insitu = rcParams['insitu']
2095        varlist = vars()
2096        from asap.asapfitter import fitter
2097        from asap.asaplinefind import linefinder
2098        from asap import _is_sequence_or_number as _is_valid
2099
[976]2100        # check whether edge is set up for each IF individually
[1118]2101        individualedge = False;
2102        if len(edge) > 1:
2103            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2104                individualedge = True;
[907]2105
[1118]2106        if not _is_valid(edge, int) and not individualedge:
[909]2107            raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2108            pair of integers specified as a tuple. Nested tuples are allowed \
2109            to make individual selection for different IFs."
[919]2110
[1118]2111        curedge = (0, 0)
2112        if individualedge:
2113            for edgepar in edge:
2114                if not _is_valid(edgepar, int):
2115                    raise ValueError, "Each element of the 'edge' tuple has \
2116                                       to be a pair of integers or an integer."
[907]2117        else:
[1118]2118            curedge = edge;
[880]2119
2120        # setup fitter
2121        f = fitter()
2122        f.set_function(poly=order)
2123
2124        # setup line finder
[1118]2125        fl = linefinder()
[1268]2126        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2127
2128        if not insitu:
[1118]2129            workscan = self.copy()
[880]2130        else:
[1118]2131            workscan = self
[880]2132
[907]2133        fl.set_scan(workscan)
2134
[1118]2135        rows = range(workscan.nrow())
[1819]2136        # Save parameters of baseline fits & masklists as a class attribute.
2137        # NOTICE: It does not reflect changes in scantable!
2138        if len(rows) > 0:
2139            self.blpars=[]
2140            self.masklists=[]
[880]2141        asaplog.push("Processing:")
2142        for r in rows:
[1118]2143            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2144                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2145                 workscan.getpol(r), workscan.getcycle(r))
[880]2146            asaplog.push(msg, False)
[907]2147
[976]2148            # figure out edge parameter
[1118]2149            if individualedge:
2150                if workscan.getif(r) >= len(edge):
2151                    raise RuntimeError, "Number of edge elements appears to " \
2152                                        "be less than the number of IFs"
2153                curedge = edge[workscan.getif(r)]
[919]2154
[1819]2155            # take into account flagtra info (CAS-1434)
2156            flagtra = workscan._getmask(r)
2157            actualmask = mask[:]
2158            if len(actualmask) == 0:
2159                actualmask = list(flagtra[:])
2160            else:
2161                if len(actualmask) != len(flagtra):
2162                    raise RuntimeError, "Mask and flagtra have different length"
2163                else:
2164                    for i in range(0, len(actualmask)):
2165                        actualmask[i] = actualmask[i] and flagtra[i]
2166
[976]2167            # setup line finder
[1819]2168            fl.find_lines(r, actualmask, curedge)
2169            outmask=fl.get_mask()
2170            f.set_scan(workscan, fl.get_mask())
2171            f.x = workscan._getabcissa(r)
2172            f.y = workscan._getspectrum(r)
2173            f.data = None
[880]2174            f.fit()
[1819]2175
2176            # Show mask list
2177            masklist=workscan.get_masklist(fl.get_mask(),row=r)
2178            msg = "mask range: "+str(masklist)
2179            asaplog.push(msg, False)
2180
[1061]2181            if plot:
2182                f.plot(residual=True)
2183                x = raw_input("Accept fit ( [y]/n ): ")
2184                if x.upper() == 'N':
[1819]2185                    self.blpars.append(None)
2186                    self.masklists.append(None)
[1061]2187                    continue
[1819]2188
[880]2189            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2190            self.blpars.append(f.get_parameters())
2191            self.masklists.append(masklist)
[1061]2192        if plot:
2193            f._p.unmap()
2194            f._p = None
2195        workscan._add_history("auto_poly_baseline", varlist)
[880]2196        if insitu:
2197            self._assign(workscan)
2198        else:
2199            return workscan
2200
[1589]2201    @print_log_dec
[914]2202    def rotate_linpolphase(self, angle):
[1846]2203        """\
[914]2204        Rotate the phase of the complex polarization O=Q+iU correlation.
2205        This is always done in situ in the raw data.  So if you call this
2206        function more than once then each call rotates the phase further.
[1846]2207
[914]2208        Parameters:
[1846]2209
[914]2210            angle:   The angle (degrees) to rotate (add) by.
[1846]2211
2212        Example::
2213
[914]2214            scan.rotate_linpolphase(2.3)
[1846]2215
[914]2216        """
2217        varlist = vars()
[936]2218        self._math._rotate_linpolphase(self, angle)
[914]2219        self._add_history("rotate_linpolphase", varlist)
2220        return
[710]2221
[1589]2222    @print_log_dec
[914]2223    def rotate_xyphase(self, angle):
[1846]2224        """\
[914]2225        Rotate the phase of the XY correlation.  This is always done in situ
2226        in the data.  So if you call this function more than once
2227        then each call rotates the phase further.
[1846]2228
[914]2229        Parameters:
[1846]2230
[914]2231            angle:   The angle (degrees) to rotate (add) by.
[1846]2232
2233        Example::
2234
[914]2235            scan.rotate_xyphase(2.3)
[1846]2236
[914]2237        """
2238        varlist = vars()
[936]2239        self._math._rotate_xyphase(self, angle)
[914]2240        self._add_history("rotate_xyphase", varlist)
2241        return
2242
[1589]2243    @print_log_dec
[914]2244    def swap_linears(self):
[1846]2245        """\
[1573]2246        Swap the linear polarisations XX and YY, or better the first two
[1348]2247        polarisations, as this also works for circulars.
[914]2248        """
2249        varlist = vars()
[936]2250        self._math._swap_linears(self)
[914]2251        self._add_history("swap_linears", varlist)
2252        return
2253
[1589]2254    @print_log_dec
[914]2255    def invert_phase(self):
[1846]2256        """\
[914]2257        Invert the phase of the complex polarisation
2258        """
2259        varlist = vars()
[936]2260        self._math._invert_phase(self)
[914]2261        self._add_history("invert_phase", varlist)
2262        return
2263
[1589]2264    @print_log_dec
[876]2265    def add(self, offset, insitu=None):
[1846]2266        """\
[513]2267        Return a scan where all spectra have the offset added
[1846]2268
[513]2269        Parameters:
[1846]2270
[513]2271            offset:      the offset
[1855]2272
[513]2273            insitu:      if False a new scantable is returned.
2274                         Otherwise, the scaling is done in-situ
2275                         The default is taken from .asaprc (False)
[1846]2276
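        Example (a minimal sketch, assuming a scantable called scan; the
        offset value is arbitrary)::

            shifted = scan.add(1.0, insitu=False)
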
[513]2277        """
2278        if insitu is None: insitu = rcParams['insitu']
[876]2279        self._math._setinsitu(insitu)
[513]2280        varlist = vars()
[876]2281        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2282        s._add_history("add", varlist)
[876]2283        if insitu:
2284            self._assign(s)
2285        else:
[513]2286            return s
2287
[1589]2288    @print_log_dec
[1308]2289    def scale(self, factor, tsys=True, insitu=None):
[1846]2290        """\
2291
[513]2292        Return a scan where all spectra are scaled by the given 'factor'
[1846]2293
[513]2294        Parameters:
[1846]2295
[1819]2296            factor:      the scaling factor (float or 1D float list)
[1855]2297
[513]2298            insitu:      if False a new scantable is returned.
2299                         Otherwise, the scaling is done in-situ
2300                         The default is taken from .asaprc (False)
[1855]2301
[513]2302            tsys:        if True (default) then apply the operation to Tsys
2303                         as well as the data
[1846]2304
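        Example (a minimal sketch, assuming a scantable called scan; the
        factor is arbitrary)::

            # scale the spectra and Tsys by a factor of two
            doubled = scan.scale(2.0, tsys=True, insitu=False)
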
[513]2305        """
2306        if insitu is None: insitu = rcParams['insitu']
[876]2307        self._math._setinsitu(insitu)
[513]2308        varlist = vars()
[1819]2309        s = None
2310        import numpy
2311        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2312            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2313                from asapmath import _array2dOp
2314                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2315            else:
2316                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2317        else:
2318            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2319        s._add_history("scale", varlist)
[876]2320        if insitu:
2321            self._assign(s)
2322        else:
[513]2323            return s
2324
[1504]2325    def set_sourcetype(self, match, matchtype="pattern",
2326                       sourcetype="reference"):
[1846]2327        """\
[1502]2328        Set the type of the source to be a source or reference scan
[1846]2329        using the provided pattern.
2330
[1502]2331        Parameters:
[1846]2332
[1504]2333            match:          a Unix style pattern, regular expression or selector
[1855]2334
[1504]2335            matchtype:      'pattern' (default) UNIX style pattern or
2336                            'regex' regular expression
[1855]2337
[1502]2338            sourcetype:     the type of the source to use (source/reference)
[1846]2339
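        Example (a sketch assuming a scantable called scan; the source name
        pattern is illustrative only)::

            # mark all scans whose names end in '_R' as reference scans
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")
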
[1502]2340        """
2341        varlist = vars()
2342        basesel = self.get_selection()
2343        stype = -1
2344        if sourcetype.lower().startswith("r"):
2345            stype = 1
2346        elif sourcetype.lower().startswith("s"):
2347            stype = 0
[1504]2348        else:
[1502]2349            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2350        if matchtype.lower().startswith("p"):
2351            matchtype = "pattern"
2352        elif matchtype.lower().startswith("r"):
2353            matchtype = "regex"
2354        else:
2355            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2356        sel = selector()
2357        if isinstance(match, selector):
2358            sel = match
2359        else:
[1504]2360            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2361        self.set_selection(basesel+sel)
2362        self._setsourcetype(stype)
2363        self.set_selection(basesel)
[1573]2364        self._add_history("set_sourcetype", varlist)
[1502]2365
[1589]2366    @print_log_dec
[1857]2367    @preserve_selection
[1819]2368    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2369        """\
[670]2370        This function allows you to build quotients automatically.
[1819]2371        It assumes the observation to have the same number of
[670]2372        "ons" and "offs".
[1846]2373
[670]2374        Parameters:
[1846]2375
[710]2376            preserve:       you can preserve (default) the continuum or
2377                            remove it.  The equations used are
[1857]2378
[670]2379                            preserve: Output = Toff * (on/off) - Toff
[1857]2380
[1070]2381                            remove:   Output = Toff * (on/off) - Ton
[1855]2382
[1573]2383            mode:           the on/off detection mode
[1348]2384                            'paired' (default)
2385                            identifies 'off' scans by the
2386                            trailing '_R' (Mopra/Parkes) or
2387                            '_e'/'_w' (Tid) and matches
2388                            on/off pairs from the observing pattern
[1502]2389                            'time'
2390                            finds the closest off in time
[1348]2391
[1857]2392        .. todo:: verify argument is not implemented
2393
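        Example (a sketch assuming a scantable called scan with matching
        on/off pairs)::

            q = scan.auto_quotient(preserve=True, mode='paired')
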
[670]2394        """
[1857]2395        varlist = vars()
[1348]2396        modes = ["time", "paired"]
[670]2397        if not mode in modes:
[876]2398            msg = "please provide valid mode. Valid modes are %s" % (modes)
2399            raise ValueError(msg)
[1348]2400        s = None
2401        if mode.lower() == "paired":
[1857]2402            sel = self.get_selection()
[1356]2403            sel.set_query("SRCTYPE==1")
2404            self.set_selection(sel)
[1348]2405            offs = self.copy()
2406            sel.set_query("SRCTYPE==0")
[1356]2407            self.set_selection(sel)
[1348]2408            ons = self.copy()
2409            s = scantable(self._math._quotient(ons, offs, preserve))
2410        elif mode.lower() == "time":
2411            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2412        s._add_history("auto_quotient", varlist)
[876]2413        return s
[710]2414
[1589]2415    @print_log_dec
[1145]2416    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2417        """\
[1143]2418        Form a quotient using "off" beams when observing in "MX" mode.
[1846]2419
[1143]2420        Parameters:
[1846]2421
[1145]2422            mask:           an optional mask to be used when weight == 'stddev'
[1855]2423
[1143]2424            weight:         How to average the off beams.  Default is 'median'.
[1855]2425
[1145]2426            preserve:       you can preserve (default) the continuum or
[1855]2427                            remove it.  The equations used are:
[1846]2428
[1855]2429                                preserve: Output = Toff * (on/off) - Toff
2430
2431                                remove:   Output = Toff * (on/off) - Ton
2432
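        Example (a sketch assuming a scantable called scan observed in
        "MX" mode)::

            q = scan.mx_quotient(weight='median', preserve=True)
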
[1217]2433        """
[1593]2434        mask = mask or ()
[1141]2435        varlist = vars()
2436        on = scantable(self._math._mx_extract(self, 'on'))
[1143]2437        preoff = scantable(self._math._mx_extract(self, 'off'))
2438        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2439        from asapmath  import quotient
[1145]2440        q = quotient(on, off, preserve)
[1143]2441        q._add_history("mx_quotient", varlist)
[1217]2442        return q
[513]2443
[1589]2444    @print_log_dec
[718]2445    def freq_switch(self, insitu=None):
[1846]2446        """\
[718]2447        Apply frequency switching to the data.
[1846]2448
[718]2449        Parameters:
[1846]2450
[718]2451            insitu:      if False a new scantable is returned.
2452                         Otherwise, the switching is done in-situ
2453                         The default is taken from .asaprc (False)
[1846]2454
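        Example (a sketch assuming a scantable called scan of
        frequency-switched data)::

            fs = scan.freq_switch(insitu=False)
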
[718]2455        """
2456        if insitu is None: insitu = rcParams['insitu']
[876]2457        self._math._setinsitu(insitu)
[718]2458        varlist = vars()
[876]2459        s = scantable(self._math._freqswitch(self))
[1118]2460        s._add_history("freq_switch", varlist)
[1856]2461        if insitu:
2462            self._assign(s)
2463        else:
2464            return s
[718]2465
[1589]2466    @print_log_dec
[780]2467    def recalc_azel(self):
[1846]2468        """Recalculate the azimuth and elevation for each position."""
[780]2469        varlist = vars()
[876]2470        self._recalcazel()
[780]2471        self._add_history("recalc_azel", varlist)
2472        return
2473
[1589]2474    @print_log_dec
[513]2475    def __add__(self, other):
2476        varlist = vars()
2477        s = None
2478        if isinstance(other, scantable):
[1573]2479            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2480        elif isinstance(other, float):
[876]2481            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2482        else:
[718]2483            raise TypeError("Other input is not a scantable or float value")
[513]2484        s._add_history("operator +", varlist)
2485        return s
2486
[1589]2487    @print_log_dec
[513]2488    def __sub__(self, other):
2489        """
2490        implicit on all axes and on Tsys
2491        """
2492        varlist = vars()
2493        s = None
2494        if isinstance(other, scantable):
[1588]2495            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2496        elif isinstance(other, float):
[876]2497            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2498        else:
[718]2499            raise TypeError("Other input is not a scantable or float value")
[513]2500        s._add_history("operator -", varlist)
2501        return s
[710]2502
[1589]2503    @print_log_dec
[513]2504    def __mul__(self, other):
2505        """
2506        implicit on all axes and on Tsys
2507        """
2508        varlist = vars()
2509        s = None
2510        if isinstance(other, scantable):
[1588]2511            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2512        elif isinstance(other, float):
[876]2513            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2514        else:
[718]2515            raise TypeError("Other input is not a scantable or float value")
[513]2516        s._add_history("operator *", varlist)
2517        return s
2518
[710]2519
[1589]2520    @print_log_dec
[513]2521    def __div__(self, other):
2522        """
2523        implicit on all axes and on Tsys
2524        """
2525        varlist = vars()
2526        s = None
2527        if isinstance(other, scantable):
[1589]2528            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2529        elif isinstance(other, float):
2530            if other == 0.0:
[718]2531                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2532            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2533        else:
[718]2534            raise TypeError("Other input is not a scantable or float value")
[513]2535        s._add_history("operator /", varlist)
2536        return s
2537
[1856]2538    @print_log_dec
[530]2539    def get_fit(self, row=0):
[1846]2540        """\
[530]2541        Print or return the stored fits for a row in the scantable
[1846]2542
[530]2543        Parameters:
[1846]2544
[530]2545            row:    the row which the fit has been applied to.
[1846]2546
[530]2547        """
2548        if row >= self.nrow():
2549            return
[976]2550        from asap.asapfit import asapfit
[530]2551        fit = asapfit(self._getfit(row))
[718]2552        if rcParams['verbose']:
[1819]2553            #print fit
2554            asaplog.push( '%s' %(fit) )
[530]2555            return
2556        else:
2557            return fit.as_dict()
2558
[1483]2559    def flag_nans(self):
[1846]2560        """\
[1483]2561        Utility function to flag NaN values in the scantable.
2562        """
2563        import numpy
2564        basesel = self.get_selection()
2565        for i in range(self.nrow()):
[1589]2566            sel = self.get_row_selector(i)
2567            self.set_selection(basesel+sel)
[1483]2568            nans = numpy.isnan(self._getspectrum(0))
2569            if numpy.any(nans):
2570                bnans = [ bool(v) for v in nans]
2571                self.flag(bnans)
2572        self.set_selection(basesel)
2573
[1588]2574    def get_row_selector(self, rowno):
2575        return selector(beams=self.getbeam(rowno),
2576                        ifs=self.getif(rowno),
2577                        pols=self.getpol(rowno),
2578                        scans=self.getscan(rowno),
2579                        cycles=self.getcycle(rowno))
[1573]2580
[484]2581    def _add_history(self, funcname, parameters):
[1435]2582        if not rcParams['scantable.history']:
2583            return
[484]2584        # create date
2585        sep = "##"
2586        from datetime import datetime
2587        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2588        hist = dstr+sep
2589        hist += funcname+sep#cdate+sep
2590        if parameters.has_key('self'): del parameters['self']
[1118]2591        for k, v in parameters.iteritems():
[484]2592            if type(v) is dict:
[1118]2593                for k2, v2 in v.iteritems():
[484]2594                    hist += k2
2595                    hist += "="
[1118]2596                    if isinstance(v2, scantable):
[484]2597                        hist += 'scantable'
2598                    elif k2 == 'mask':
[1118]2599                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]2600                            hist += str(self._zip_mask(v2))
2601                        else:
2602                            hist += str(v2)
[484]2603                    else:
[513]2604                        hist += str(v2)
[484]2605            else:
2606                hist += k
2607                hist += "="
[1118]2608                if isinstance(v, scantable):
[484]2609                    hist += 'scantable'
2610                elif k == 'mask':
[1118]2611                    if isinstance(v, list) or isinstance(v, tuple):
[513]2612                        hist += str(self._zip_mask(v))
2613                    else:
2614                        hist += str(v)
[484]2615                else:
2616                    hist += str(v)
2617            hist += sep
2618        hist = hist[:-2] # remove trailing '##'
2619        self._addhistory(hist)
2620
[710]2621
[484]2622    def _zip_mask(self, mask):
2623        mask = list(mask)
2624        i = 0
2625        segments = []
2626        while mask[i:].count(1):
2627            i += mask[i:].index(1)
2628            if mask[i:].count(0):
2629                j = i + mask[i:].index(0)
2630            else:
[710]2631                j = len(mask)
[1118]2632            segments.append([i, j])
[710]2633            i = j
[484]2634        return segments
[714]2635
[626]2636    def _get_ordinate_label(self):
2637        fu = "("+self.get_fluxunit()+")"
2638        import re
2639        lbl = "Intensity"
[1118]2640        if re.match(".K.", fu):
[626]2641            lbl = "Brightness Temperature "+ fu
[1118]2642        elif re.match(".Jy.", fu):
[626]2643            lbl = "Flux density "+ fu
2644        return lbl
[710]2645
[876]2646    def _check_ifs(self):
2647        nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2648        nchans = filter(lambda t: t > 0, nchans)
[876]2649        return (sum(nchans)/len(nchans) == nchans[0])
[976]2650
[1819]2651    def _fill(self, names, unit, average, getpt, antenna):
[976]2652        first = True
2653        fullnames = []
2654        for name in names:
2655            name = os.path.expandvars(name)
2656            name = os.path.expanduser(name)
2657            if not os.path.exists(name):
2658                msg = "File '%s' does not exist" % (name)
2659                if rcParams['verbose']:
2660                    asaplog.push(msg)
[1819]2661                    print_log( 'ERROR' )
[976]2662                    return
2663                raise IOError(msg)
2664            fullnames.append(name)
2665        if average:
2666            asaplog.push('Auto averaging integrations')
[1079]2667        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2668        for name in fullnames:
[1073]2669            tbl = Scantable(stype)
[1843]2670            r = filler(tbl)
[1504]2671            rx = rcParams['scantable.reference']
[1843]2672            r.setreferenceexpr(rx)
[976]2673            msg = "Importing %s..." % (name)
[1118]2674            asaplog.push(msg, False)
[976]2675            print_log()
[1843]2676            r.open(name)# antenna, -1, -1, getpt)
2677            r.fill()
[976]2678            if average:
[1118]2679                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2680            if not first:
2681                tbl = self._math._merge([self, tbl])
2682            Scantable.__init__(self, tbl)
[1843]2683            r.close()
[1118]2684            del r, tbl
[976]2685            first = False
2686        if unit is not None:
2687            self.set_fluxunit(unit)
[1824]2688        if not is_casapy():
2689            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2690
[1402]2691    def __getitem__(self, key):
2692        if key < 0:
2693            key += self.nrow()
2694        if key >= self.nrow():
2695            raise IndexError("Row index out of range.")
2696        return self._getspectrum(key)
2697
2698    def __setitem__(self, key, value):
2699        if key < 0:
2700            key += self.nrow()
2701        if key >= self.nrow():
2702            raise IndexError("Row index out of range.")
2703        if not hasattr(value, "__len__") or \
2704                len(value) > self.nchan(self.getif(key)):
2705            raise ValueError("Spectrum length doesn't match.")
2706        return self._setspectrum(value, key)
2707
2708    def __len__(self):
2709        return self.nrow()
2710
2711    def __iter__(self):
2712        for i in range(len(self)):
2713            yield self[i]