source: branches/polybatch/python/scantable.py @ 1924

Last change on this file since 1924 was 1924, checked in by Malte Marquarding, 14 years ago

Ticket #206: use STFitEntry as return object instead of pointer wrangling.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.2 KB
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
21def preserve_selection(func):
[1691]22    @wraps_dec(func)
[1689]23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
[1857]25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
[1689]29        return val
30    return wrap
31
[1846]32def is_scantable(filename):
33    """Is the given file a scantable?
[1689]34
[1846]35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
[1883]40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
[1697]56
[1883]57def is_ms(filename):
58    """Is the given file a MeasurementSet?
[1697]59
[1883]60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
[876]78class scantable(Scantable):
[1846]79    """\
80        The ASAP container for scans (single-dish data).
[102]81    """
[1819]82
[1862]83    @asaplog_post_dec
[1916]84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]87        """\
[102]88        Create a scantable from a saved one or make a reference
[1846]89
[102]90        Parameters:
[1846]91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations withinb a scan on read.
100                          The default (True) is taken from .asaprc.
101
[484]102            unit:         brightness unit; must be consistent with K or Jy.
[1846]103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which makes loading
110                          the MS data faster in some cases.
111
[1920]112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
[1846]114
115                          parallactify: Indicate that the data have been parallactified. Default
116                          is taken from rc file.
117
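        Example (an illustrative sketch; 'myobs.rpf' is a placeholder for an
        existing rpfits/sdfits/ms file or asap table on disk)::

            s = scantable('myobs.rpf', average=True, unit='K')
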
[710]118        """
[976]119        if average is None:
[710]120            average = rcParams['scantable.autoaverage']
[1916]121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
[1593]141        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]142        varlist = vars()
[876]143        from asap._asap import stmath
[1819]144        self._math = stmath( rcParams['insitu'] )
[876]145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
[181]147        else:
[1697]148            if isinstance(filename, str):
[976]149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
[1697]154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
[1819]159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
[1916]164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
[1893]172                elif os.path.isfile(filename):
[1916]173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
[1883]175                else:
[1819]176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
[1859]178                    raise IOError(msg)
[1118]179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]180                  and isinstance(filename[-1], str):
[1916]181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
[1586]183        self.parallactify(parallactify)
[1259]184        self._add_history("scantable", varlist)
[102]185
[1862]186    @asaplog_post_dec
[876]187    def save(self, name=None, format=None, overwrite=False):
[1846]188        """\
[1280]189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
[1846]191
[116]192        Parameters:
[1846]193
[1093]194            name:        the name of the outputfile. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
[497]196                         and header in 'name'_header.txt)
[1855]197
[116]198            format:      an optional file format. Default is ASAP.
[1855]199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
[411]208            overwrite:   If True, overwrite the file if it exists.
[256]209                         The default (False) is to return with a warning
[411]210                         without writing the output. USE WITH CARE.
[1855]211
[1846]212        Example::
213
[116]214            scan.save('myscan.asap')
[1118]215            scan.save('myscan.sdfits', 'SDFITS')
[1846]216
[116]217        """
[411]218        from os import path
[1593]219        format = format or rcParams['scantable.save']
[256]220        suffix = '.'+format.lower()
[1118]221        if name is None or name == "":
[256]222            name = 'scantable'+suffix
[718]223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
[411]225        name = path.expandvars(name)
[256]226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
[718]228                msg = "File %s exists." % name
[1859]229                raise IOError(msg)
[451]230        format2 = format.upper()
231        if format2 == 'ASAP':
[116]232            self._save(name)
233        else:
[989]234            from asap._asap import stwriter as stw
[1118]235            writer = stw(format2)
236            writer.write(self, name)
[116]237        return
238
[102]239    def copy(self):
[1846]240        """Return a copy of this scantable.
241
242        *Note*:
243
[1348]244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]245
246        Example::
247
[102]248            copiedscan = scan.copy()
[1846]249
[102]250        """
[876]251        sd = scantable(Scantable._copy(self))
[113]252        return sd
253
[1093]254    def drop_scan(self, scanid=None):
[1846]255        """\
[1093]256        Return a new scantable where the specified scan number(s) has(have)
257        been dropped.
[1846]258
[1093]259        Parameters:
[1846]260
[1093]261            scanid:    a (list of) scan number(s)
[1846]262
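        Example (illustrative; assumes scans 2 and 3 exist in the scantable)::

            trimmed = scan.drop_scan([2, 3])
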
[1093]263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
[1859]268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
[1093]276
[1594]277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
[102]284    def get_scan(self, scanid=None):
[1855]285        """\
[102]286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
[1846]288
289        *Note*:
290
[1348]291            See scantable.drop_scan() for the inverse operation.
[1846]292
[102]293        Parameters:
[1846]294
[513]295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
[1846]298
299        Example::
300
[513]301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
[1118]306            newscan = scan.get_scan([0, 2, 7, 10])
[1846]307
[102]308        """
309        if scanid is None:
[1859]310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
[102]312        try:
[946]313            bsel = self.get_selection()
314            sel = selector()
[102]315            if type(scanid) is str:
[946]316                sel.set_name(scanid)
[1594]317                return self._select_copy(sel)
[102]318            elif type(scanid) is int:
[946]319                sel.set_scans([scanid])
[1594]320                return self._select_copy(sel)
[381]321            elif type(scanid) is list:
[946]322                sel.set_scans(scanid)
[1594]323                return self._select_copy(sel)
[381]324            else:
[718]325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
[1859]326                raise TypeError(msg)
[102]327        except RuntimeError:
[1859]328            raise
[102]329
330    def __str__(self):
[1118]331        return Scantable._summary(self, True)
[102]332
[976]333    def summary(self, filename=None):
[1846]334        """\
[102]335        Print a summary of the contents of this scantable.
[1846]336
[102]337        Parameters:
[1846]338
[102]339            filename:    the name of a file to write the output to
340                         Default - no file output
[1846]341
[102]342        """
[976]343        info = Scantable._summary(self, True)
[102]344        if filename is not None:
[256]345            if filename == "":
346                filename = 'scantable_summary.txt'
[415]347            from os.path import expandvars, isdir
[411]348            filename = expandvars(filename)
[415]349            if not isdir(filename):
[413]350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
[718]354                msg = "Illegal file name '%s'." % (filename)
[1859]355                raise IOError(msg)
356        return page(info)
[710]357
[1512]358    def get_spectrum(self, rowno):
[1471]359        """Return the spectrum for the given row in the scantable as a list.
[1846]360
[1471]361        Parameters:
[1846]362
[1573]363             rowno:   the row number to retrieve the spectrum from
[1846]364
[1471]365        """
366        return self._getspectrum(rowno)
[946]367
[1471]368    def get_mask(self, rowno):
369        """Return the mask for the given row in the scantable as a list.
[1846]370
[1471]371        Parameters:
[1846]372
[1573]373             rowno:   the row number to retrieve the mask from
[1846]374
[1471]375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for the given row in the scantable.
[1846]380
[1471]381        Parameters:
[1846]382
[1855]383             spec:   the new spectrum
[1846]384
[1855]385             rowno:  the row number to set the spectrum for
386
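        Example (illustrative; replaces row 0 with a scaled copy of itself)::

            spec = [ 2.0*v for v in scan.get_spectrum(0) ]
            scan.set_spectrum(spec, 0)
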
[1471]387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
[1600]391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
[1846]393
394        *Note*:
395
[1600]396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
[1846]402              i.e.::
[1600]403
[1846]404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
[1600]407        Parameters:
[1846]408
[1600]409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
[946]414    def get_selection(self):
[1846]415        """\
[1005]416        Get the selection object currently set on this scantable.
[1846]417
418        Example::
419
[1005]420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
[1846]423
[946]424        """
425        return selector(self._getselection())
426
[1576]427    def set_selection(self, selection=None, **kw):
[1846]428        """\
[1005]429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
[1846]431
[1005]432        Parameters:
[1697]433
[1846]434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
[1697]437
[1846]438        Examples::
[1697]439
[1005]440            sel = selector()         # create a selection object
[1118]441            sel.set_scans([0, 3])    # select SCANNO 0 and 3
[1005]442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
[1697]445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
[1846]449
[946]450        """
[1576]451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
[946]461        self._setselection(selection)
462
[1819]463    def get_row(self, row=0, insitu=None):
[1846]464        """\
[1819]465        Select a row in the scantable.
466        Return a scantable with a single row.
[1846]467
[1819]468        Parameters:
[1846]469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
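        Example (illustrative; assumes the scantable has at least 4 rows)::

            row3 = scan.get_row(3, insitu=False)
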
[1819]475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
[1862]499    @asaplog_post_dec
[1907]500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]501        """\
[135]502        Determine the specified statistic of the current beam/if/pol
[102]503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
[1846]505
[102]506        Parameters:
[1846]507
[1819]508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]510
[135]511            mask:    an optional mask specifying where the statistic
[102]512                     should be determined.
[1855]513
[1819]514            form:    format string to print statistic values
[1846]515
[1907]516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
[1846]518
[1907]519        Example:
[113]520            scan.set_unit('channel')
[1118]521            msk = scan.create_mask([100, 200], [500, 600])
[135]522            scan.stats(stat='mean', mask=msk)
[1846]523
[102]524        """
[1593]525        mask = mask or []
[876]526        if not self._check_ifs():
[1118]527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
[1819]530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
[1907]537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]542
[1819]543        #def cb(i):
544        #    return statvals[i]
[256]545
[1819]546        #return self._row_callback(cb, stat)
[102]547
[1819]548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
[1907]553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
[1819]560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
[1907]575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]578            #outvec.append(callback(i))
[1907]579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]585            out +=  sep+"\n"
586
[1859]587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
[1819]605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
[1846]608        """\
[1819]609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
[1846]611
[1819]612        Parameters:
[1846]613
[1819]614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
[1855]616
[1819]617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. pos=0
[1846]619
[1819]620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
[1118]628    def stddev(self, mask=None):
[1846]629        """\
[135]630        Determine the standard deviation of the current beam/if/pol
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
[1846]633
[135]634        Parameters:
[1846]635
[135]636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
[1846]639        Example::
640
[135]641            scan.set_unit('channel')
[1118]642            msk = scan.create_mask([100, 200], [500, 600])
[135]643            scan.stddev(mask=msk)
[1846]644
[135]645        """
[1118]646        return self.stats(stat='stddev', mask=mask);
[135]647
[1003]648
[1259]649    def get_column_names(self):
[1846]650        """\
[1003]651        Return a list of column names, which can be used for selection.
652        """
[1259]653        return list(Scantable.get_column_names(self))
[1003]654
[1730]655    def get_tsys(self, row=-1):
[1846]656        """\
[113]657        Return the System temperatures.
[1846]658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
[113]663        Returns:
[1846]664
[876]665            a list of Tsys values for the current selection
[1846]666
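        Example (illustrative)::

            tsys_all = scan.get_tsys()       # one value per row
            tsys_0 = scan.get_tsys(row=0)    # value for row 0 only
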
[113]667        """
[1730]668        if row > -1:
669            return self._get_column(self._gettsys, row)
[876]670        return self._row_callback(self._gettsys, "Tsys")
[256]671
[1730]672
673    def get_weather(self, row=-1):
[1846]674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or list of dicts of values for the current selection
684
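        Example (illustrative; assumes the weather columns are filled)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['windspeed']
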
685        """
686
[1730]687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
[876]703    def _row_callback(self, callback, label):
704        out = ""
[1118]705        outvec = []
[1590]706        sep = '-'*50
[876]707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
[1590]710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]711            out += 'Time[%s]:\n' % (tm)
[1590]712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
[1590]718            out +=  sep+'\n'
[1859]719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
[1861]724        asaplog.post()
[1175]725        return outvec
[256]726
[1070]727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
[1819]733            if  0 <= row < self.nrow():
[1070]734                return callback(row)
[256]735
[1070]736
[1348]737    def get_time(self, row=-1, asdatetime=False):
[1846]738        """\
[113]739        Get a list of time stamps for the observations.
[1348]740        Return a datetime object for each integration time stamp in the scantable.
[1846]741
[113]742        Parameters:
[1846]743
[1348]744            row:          row no of integration. Default -1 return all rows
[1855]745
[1348]746            asdatetime:   return values as datetime objects rather than strings
[1846]747
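        Example (illustrative)::

            stamps = scan.get_time(asdatetime=True)   # list of datetime objects
            first = scan.get_time(row=0)               # string for row 0
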
[113]748        """
[1175]749        from time import strptime
750        from datetime import datetime
[1392]751        times = self._get_column(self._gettime, row)
[1348]752        if not asdatetime:
[1392]753            return times
[1175]754        format = "%Y/%m/%d/%H:%M:%S"
755        if isinstance(times, list):
756            return [datetime(*strptime(i, format)[:6]) for i in times]
757        else:
758            return datetime(*strptime(times, format)[:6])
[102]759
[1348]760
761    def get_inttime(self, row=-1):
[1846]762        """\
[1348]763        Get a list of integration times for the observations.
764        Return a time in seconds for each integration in the scantable.
[1846]765
[1348]766        Parameters:
[1846]767
[1348]768            row:    row no of integration. Default -1 return all rows.
[1846]769
[1348]770        """
[1573]771        return self._get_column(self._getinttime, row)
[1348]772
[1573]773
[714]774    def get_sourcename(self, row=-1):
[1846]775        """\
[794]776        Get a list of source names for the observations.
[714]777        Return a string for each integration in the scantable.
778        Parameters:
[1846]779
[1348]780            row:    row no of integration. Default -1 return all rows.
[1846]781
[714]782        """
[1070]783        return self._get_column(self._getsourcename, row)
[714]784
[794]785    def get_elevation(self, row=-1):
[1846]786        """\
[794]787        Get a list of elevations for the observations.
788        Return a float for each integration in the scantable.
[1846]789
[794]790        Parameters:
[1846]791
[1348]792            row:    row no of integration. Default -1 return all rows.
[1846]793
[794]794        """
[1070]795        return self._get_column(self._getelevation, row)
[794]796
797    def get_azimuth(self, row=-1):
[1846]798        """\
[794]799        Get a list of azimuths for the observations.
800        Return a float for each integration in the scantable.
[1846]801
[794]802        Parameters:
[1348]803            row:    row no of integration. Default -1 return all rows.
[1846]804
[794]805        """
[1070]806        return self._get_column(self._getazimuth, row)
[794]807
808    def get_parangle(self, row=-1):
[1846]809        """\
[794]810        Get a list of parallactic angles for the observations.
811        Return a float for each integration in the scantable.
[1846]812
[794]813        Parameters:
[1846]814
[1348]815            row:    row no of integration. Default -1 return all rows.
[1846]816
[794]817        """
[1070]818        return self._get_column(self._getparangle, row)
[794]819
[1070]820    def get_direction(self, row=-1):
821        """
822        Get a list of Positions on the sky (direction) for the observations.
[1594]823        Return a string for each integration in the scantable.
[1855]824
[1070]825        Parameters:
[1855]826
[1070]827            row:    row no of integration. Default -1 return all rows
[1855]828
[1070]829        """
830        return self._get_column(self._getdirection, row)
831
[1391]832    def get_directionval(self, row=-1):
[1846]833        """\
[1391]834        Get a list of Positions on the sky (direction) for the observations.
835        Return a float for each integration in the scantable.
[1846]836
[1391]837        Parameters:
[1846]838
[1391]839            row:    row no of integration. Default -1 return all rows
[1846]840
[1391]841        """
842        return self._get_column(self._getdirectionvec, row)
843
[1862]844    @asaplog_post_dec
[102]845    def set_unit(self, unit='channel'):
[1846]846        """\
[102]847        Set the unit for all following operations on this scantable
[1846]848
[102]849        Parameters:
[1846]850
851            unit:    optional unit, default is 'channel'. Use one of '*Hz',
852                     'km/s', 'channel' or equivalent ''
853
[102]854        """
[484]855        varlist = vars()
[1118]856        if unit in ['', 'pixel', 'channel']:
[113]857            unit = ''
858        inf = list(self._getcoordinfo())
859        inf[0] = unit
860        self._setcoordinfo(inf)
[1118]861        self._add_history("set_unit", varlist)
[113]862
[1862]863    @asaplog_post_dec
[484]864    def set_instrument(self, instr):
[1846]865        """\
[1348]866        Set the instrument for subsequent processing.
[1846]867
[358]868        Parameters:
[1846]869
[710]870            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]871                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]872
[358]873        """
874        self._setInstrument(instr)
[1118]875        self._add_history("set_instrument", vars())
[358]876
[1862]877    @asaplog_post_dec
[1190]878    def set_feedtype(self, feedtype):
[1846]879        """\
[1190]880        Overwrite the feed type, which might not be set correctly.
[1846]881
[1190]882        Parameters:
[1846]883
[1190]884            feedtype:     'linear' or 'circular'
[1846]885
[1190]886        """
887        self._setfeedtype(feedtype)
888        self._add_history("set_feedtype", vars())
889
[1862]890    @asaplog_post_dec
[276]891    def set_doppler(self, doppler='RADIO'):
[1846]892        """\
[276]893        Set the doppler for all following operations on this scantable.
[1846]894
[276]895        Parameters:
[1846]896
[276]897            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]898
[276]899        """
[484]900        varlist = vars()
[276]901        inf = list(self._getcoordinfo())
902        inf[2] = doppler
903        self._setcoordinfo(inf)
[1118]904        self._add_history("set_doppler", vars())
[710]905
[1862]906    @asaplog_post_dec
[226]907    def set_freqframe(self, frame=None):
[1846]908        """\
[113]909        Set the frame type of the Spectral Axis.
[1846]910
[113]911        Parameters:
[1846]912
[591]913            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]914                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]915                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]916
917        Example::
918
[113]919            scan.set_freqframe('BARY')
[1846]920
[113]921        """
[1593]922        frame = frame or rcParams['scantable.freqframe']
[484]923        varlist = vars()
[1819]924        # "REST" is not implemented in casacore
925        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
926        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
927        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]928                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]929
[989]930        if frame in valid:
[113]931            inf = list(self._getcoordinfo())
932            inf[1] = frame
933            self._setcoordinfo(inf)
[1118]934            self._add_history("set_freqframe", varlist)
[102]935        else:
[1118]936            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
[1859]937            raise TypeError(msg)
[710]938
[1862]939    @asaplog_post_dec
[989]940    def set_dirframe(self, frame=""):
[1846]941        """\
[989]942        Set the frame type of the Direction on the sky.
[1846]943
[989]944        Parameters:
[1846]945
[989]946            frame:   an optional frame type, default ''. Valid frames are:
947                     'J2000', 'B1950', 'GALACTIC'
[1846]948
949        Example:
950
[989]951            scan.set_dirframe('GALACTIC')
[1846]952
[989]953        """
954        varlist = vars()
[1859]955        Scantable.set_dirframe(self, frame)
[1118]956        self._add_history("set_dirframe", varlist)
[989]957
[113]958    def get_unit(self):
[1846]959        """\
[113]960        Get the default unit set in this scantable
[1846]961
[113]962        Returns:
[1846]963
[113]964            A unit string
[1846]965
[113]966        """
967        inf = self._getcoordinfo()
968        unit = inf[0]
969        if unit == '': unit = 'channel'
970        return unit
[102]971
[1862]972    @asaplog_post_dec
[158]973    def get_abcissa(self, rowno=0):
[1846]974        """\
[158]975        Get the abcissa in the current coordinate setup for the currently
[113]976        selected Beam/IF/Pol
[1846]977
[113]978        Parameters:
[1846]979
[226]980            rowno:    an optional row number in the scantable. Default is the
981                      first row, i.e. rowno=0
[1846]982
[113]983        Returns:
[1846]984
[1348]985            The abcissa values and the format string (as a dictionary)
[1846]986
[113]987        """
[256]988        abc = self._getabcissa(rowno)
[710]989        lbl = self._getabcissalabel(rowno)
[158]990        return abc, lbl
[113]991
[1862]992    @asaplog_post_dec
[1819]993    def flag(self, mask=None, unflag=False):
[1846]994        """\
[1001]995        Flag the selected data using an optional channel mask.
[1846]996
[1001]997        Parameters:
[1846]998
[1001]999            mask:   an optional channel mask, created with create_mask. Default
1000                    (no mask) is all channels.
[1855]1001
[1819]1002            unflag:    if True, unflag the data
[1846]1003
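        Example (illustrative channel ranges; assumes at least 1024 channels)::

            msk = scan.create_mask([0, 100], [900, 1023])
            scan.flag(mask=msk)
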
[1001]1004        """
1005        varlist = vars()
[1593]1006        mask = mask or []
[1859]1007        self._flag(mask, unflag)
[1001]1008        self._add_history("flag", varlist)
1009
[1862]1010    @asaplog_post_dec
[1819]1011    def flag_row(self, rows=[], unflag=False):
[1846]1012        """\
[1819]1013        Flag the selected data in row-based manner.
[1846]1014
[1819]1015        Parameters:
[1846]1016
[1843]1017            rows:   list of row numbers to be flagged. Default is no row
1018                    (must be explicitly specified to execute row-based flagging).
[1855]1019
[1819]1020            unflag: if True, unflag the data.
[1846]1021
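        Example (illustrative row numbers)::

            scan.flag_row(rows=[2, 3])
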
[1819]1022        """
1023        varlist = vars()
[1859]1024        self._flag_row(rows, unflag)
[1819]1025        self._add_history("flag_row", varlist)
1026
[1862]1027    @asaplog_post_dec
[1819]1028    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1029        """\
[1819]1030        Flag the selected data outside a specified range (channel-based)
[1846]1031
[1819]1032        Parameters:
[1846]1033
[1819]1034            uthres:      upper threshold.
[1855]1035
[1819]1036            dthres:      lower threshold
[1846]1037
[1819]1038            clipoutside: True for flagging data outside the range [dthres:uthres].
1039                         False for flagging data inside the range.
[1855]1040
[1846]1041            unflag:      if True, unflag the data.
1042
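        Example (illustrative thresholds)::

            # keep only data in the range [-1.0, 5.0], flag everything outside
            scan.clip(uthres=5.0, dthres=-1.0, clipoutside=True)
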
[1819]1043        """
1044        varlist = vars()
[1859]1045        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1046        self._add_history("clip", varlist)
1047
[1862]1048    @asaplog_post_dec
[1584]1049    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1050        """\
[1192]1051        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1052        Flagged data in the scantable gets interpolated over the region.
[1192]1053        No taper is applied.
[1846]1054
[1192]1055        Parameters:
[1846]1056
[1579]1057            start:    the start frequency (really a period within the
1058                      bandwidth)  or period to remove
[1855]1059
[1579]1060            end:      the end frequency or period to remove
[1855]1061
[1584]1062            unit:     the frequency unit (default "MHz") or "" for
[1579]1063                      explicit lag channels
[1846]1064
1065        *Notes*:
1066
[1579]1067            It is recommended to flag edges of the band or strong
[1348]1068            signals beforehand.
[1846]1069
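        Example (an illustrative sketch; the frequencies are placeholders)::

            cleaned = scan.lag_flag(10.0, 11.0, unit="MHz", insitu=False)
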
[1192]1070        """
1071        if insitu is None: insitu = rcParams['insitu']
1072        self._math._setinsitu(insitu)
1073        varlist = vars()
[1579]1074        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1075        if not (unit == "" or base.has_key(unit)):
[1192]1076            raise ValueError("%s is not a valid unit." % unit)
[1859]1077        if unit == "":
1078            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1079        else:
1080            s = scantable(self._math._lag_flag(self, start*base[unit],
1081                                               end*base[unit], "frequency"))
[1192]1082        s._add_history("lag_flag", varlist)
1083        if insitu:
1084            self._assign(s)
1085        else:
1086            return s
[1001]1087
[1862]1088    @asaplog_post_dec
[113]1089    def create_mask(self, *args, **kwargs):
[1846]1090        """\
[1118]1091        Compute and return a mask based on [min, max] windows.
[189]1092        The specified windows are to be INCLUDED, when the mask is
[113]1093        applied.
[1846]1094
[102]1095        Parameters:
[1846]1096
[1118]1097            [min, max], [min2, max2], ...
[1024]1098                Pairs of start/end points (inclusive) specifying the regions
[102]1099                to be masked
[1855]1100
[189]1101            invert:     optional argument. If specified as True,
1102                        return an inverted mask, i.e. the regions
1103                        specified are EXCLUDED
[1855]1104
[513]1105            row:        create the mask using the specified row for
1106                        unit conversions, default is row=0
1107                        only necessary if frequency varies over rows.
[1846]1108
1109        Examples::
1110
[113]1111            scan.set_unit('channel')
[1846]1112            # a)
[1118]1113            msk = scan.create_mask([400, 500], [800, 900])
[189]1114            # masks everything outside 400 and 500
[113]1115            # and 800 and 900 in the unit 'channel'
1116
[1846]1117            # b)
[1118]1118            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1119            # masks the regions between 400 and 500
[113]1120            # and 800 and 900 in the unit 'channel'
[1846]1121
1122            # c)
1123            #mask only channel 400
[1554]1124            msk =  scan.create_mask([400])
[1846]1125
[102]1126        """
[1554]1127        row = kwargs.get("row", 0)
[513]1128        data = self._getabcissa(row)
[113]1129        u = self._getcoordinfo()[0]
[1859]1130        if u == "":
1131            u = "channel"
1132        msg = "The current mask window unit is %s" % u
1133        i = self._check_ifs()
1134        if not i:
1135            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1136        asaplog.push(msg)
[102]1137        n = self.nchan()
[1295]1138        msk = _n_bools(n, False)
[710]1139        # test if args is a 'list' or a 'normal *args - UGLY!!!
1140
[1118]1141        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1142             and args or args[0]
[710]1143        for window in ws:
[1554]1144            if len(window) == 1:
1145                window = [window[0], window[0]]
1146            if len(window) == 0 or  len(window) > 2:
1147                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1148            if window[0] > window[1]:
1149                tmp = window[0]
1150                window[0] = window[1]
1151                window[1] = tmp
[102]1152            for i in range(n):
[1024]1153                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1154                    msk[i] = True
[113]1155        if kwargs.has_key('invert'):
1156            if kwargs.get('invert'):
[1295]1157                msk = mask_not(msk)
[102]1158        return msk
[710]1159
[1924]1160    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1161        """\
[1819]1162        Compute and return a list of mask windows, [min, max].
[1846]1163
[1819]1164        Parameters:
[1846]1165
[1819]1166            mask:       channel mask, created with create_mask.
[1855]1167
[1819]1168            row:        calculate the masklist using the specified row
1169                        for unit conversions, default is row=0
1170                        only necessary if frequency varies over rows.
[1846]1171
[1819]1172        Returns:
[1846]1173
[1819]1174            [min, max], [min2, max2], ...
1175                Pairs of start/end points (inclusive) specifying
1176                the masked regions
[1846]1177
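        Example (illustrative; mirrors the windows given to create_mask)::

            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)
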
[1819]1178        """
1179        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1180            raise TypeError("The mask should be list or tuple.")
1181        if len(mask) < 2:
1182            raise TypeError("The mask should contain more than 1 element")
1183        if self.nchan() != len(mask):
1184            msg = "Number of channels in scantable != number of mask elements"
1185            raise TypeError(msg)
1186        data = self._getabcissa(row)
1187        u = self._getcoordinfo()[0]
[1859]1188        if u == "":
1189            u = "channel"
1190        msg = "The current mask window unit is %s" % u
1191        i = self._check_ifs()
1192        if not i:
1193            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1924]1194        if not silent:
1195            asaplog.push(msg)
[1819]1196        masklist=[]
1197        ist, ien = None, None
1198        ist, ien=self.get_mask_indices(mask)
1199        if ist is not None and ien is not None:
1200            for i in xrange(len(ist)):
1201                range=[data[ist[i]],data[ien[i]]]
1202                range.sort()
1203                masklist.append([range[0],range[1]])
1204        return masklist
1205
1206    def get_mask_indices(self, mask=None):
[1846]1207        """\
[1819]1208        Compute and return lists of mask start indices and mask end indices.
[1855]1209
1210        Parameters:
1211
[1819]1212            mask:       channel mask, created with create_mask.
[1846]1213
[1819]1214        Returns:
[1846]1215
[1819]1216            List of mask start indices and that of mask end indices,
1217            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1218
[1819]1219        """
1220        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1221            raise TypeError("The mask should be list or tuple.")
1222        if len(mask) < 2:
1223            raise TypeError("The mask should contain more than 1 element")
1224        istart=[]
1225        iend=[]
1226        if mask[0]: istart.append(0)
1227        for i in range(len(mask)-1):
1228            if not mask[i] and mask[i+1]:
1229                istart.append(i+1)
1230            elif mask[i] and not mask[i+1]:
1231                iend.append(i)
1232        if mask[len(mask)-1]: iend.append(len(mask)-1)
1233        if len(istart) != len(iend):
1234            raise RuntimeError("Numbers of mask start != mask end.")
1235        for i in range(len(istart)):
1236            if istart[i] > iend[i]:
1237                raise RuntimeError("Mask start index > mask end index")
1238                break
1239        return istart,iend
1240
1241#    def get_restfreqs(self):
1242#        """
1243#        Get the restfrequency(s) stored in this scantable.
1244#        The return value(s) are always of unit 'Hz'
1245#        Parameters:
1246#            none
1247#        Returns:
1248#            a list of doubles
1249#        """
1250#        return list(self._getrestfreqs())
1251
1252    def get_restfreqs(self, ids=None):
[1846]1253        """\
[256]1254        Get the restfrequency(s) stored in this scantable.
1255        The return value(s) are always of unit 'Hz'
[1846]1256
[256]1257        Parameters:
[1846]1258
[1819]1259            ids: (optional) a list of MOLECULE_IDs for which the restfrequency(s)
1260                 are to be retrieved
[1846]1261
[256]1262        Returns:
[1846]1263
[1819]1264            dictionary containing ids and a list of doubles for each id
[1846]1265
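        Example (illustrative; assumes a MOLECULE_ID of 0 exists)::

            rfreqs = scan.get_restfreqs()       # all molecule ids
            rf0 = scan.get_restfreqs(ids=0)     # restfrequencies for id 0
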
[256]1266        """
[1819]1267        if ids is None:
1268            rfreqs={}
1269            idlist = self.getmolnos()
1270            for i in idlist:
1271                rfreqs[i]=list(self._getrestfreqs(i))
1272            return rfreqs
1273        else:
1274            if type(ids)==list or type(ids)==tuple:
1275                rfreqs={}
1276                for i in ids:
1277                    rfreqs[i]=list(self._getrestfreqs(i))
1278                return rfreqs
1279            else:
1280                return list(self._getrestfreqs(ids))
1281            #return list(self._getrestfreqs(ids))
[102]1282
[931]1283    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1284        """\
[931]1285        Set or replace the restfrequency specified.
1286        If the 'freqs' argument holds a scalar,
1287        then that rest frequency will be applied to all the selected
1288        data.  If the 'freqs' argument holds
1289        a vector, then it MUST be of equal or smaller length than
1290        the number of IFs (and the available restfrequencies will be
1291        replaced by this vector).  In this case, *all* data have
1292        the restfrequency set per IF according
1293        to the corresponding value you give in the 'freqs' vector.
[1118]1294        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1295        IF 1 gets restfreq 2e9.
[1846]1296
[1395]1297        You can also specify the frequencies via a linecatalog.
[1153]1298
[931]1299        Parameters:
[1846]1300
[931]1301            freqs:   list of rest frequency values or string identifiers
[1855]1302
[931]1303            unit:    unit for rest frequency (default 'Hz')
[402]1304
[1846]1305
1306        Example::
1307
[1819]1308            # set the given restfrequency for the all currently selected IFs
[931]1309            scan.set_restfreqs(freqs=1.4e9)
[1845]1310            # set restfrequencies for the n IFs  (n > 1) in the order of the
1311            # list, i.e
1312            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1313            # len(list_of_restfreqs) == nIF
1314            # for nIF == 1 the following will set multiple restfrequency for
1315            # that IF
[1819]1316            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1317            # set multiple restfrequencies per IF. as a list of lists where
1318            # the outer list has nIF elements, the inner ones are of arbitrary length
1319            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1320
[1846]1321       *Note*:
[1845]1322
[931]1323            To do more sophisticated restfrequency setting, e.g. on a
1324            source and IF basis, use scantable.set_selection() before using
[1846]1325            this function::
[931]1326
[1846]1327                # provided your scantable is called scan
1328                selection = selector()
1329                selection.set_name("ORION*")
1330                selection.set_ifs([1])
1331                scan.set_selection(selection)
1332                scan.set_restfreqs(freqs=86.6e9)
1333
[931]1334        """
1335        varlist = vars()
[1157]1336        from asap import linecatalog
1337        # simple  value
[1118]1338        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1339            self._setrestfreqs([freqs], [""], unit)
[1157]1340        # list of values
[1118]1341        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1342            # list values are scalars
[1118]1343            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1344                if len(freqs) == 1:
1345                    self._setrestfreqs(freqs, [""], unit)
1346                else:
1347                    # allow the 'old' mode of setting multiple IFs
1348                    sel = selector()
1349                    savesel = self._getselection()
1350                    iflist = self.getifnos()
1351                    if len(freqs)>len(iflist):
1352                        raise ValueError("number of elements in list of list "
1353                                         "exceeds the current IF selections")
1354                    iflist = self.getifnos()
1355                    for i, fval in enumerate(freqs):
1356                        sel.set_ifs(iflist[i])
1357                        self._setselection(sel)
1358                        self._setrestfreqs([fval], [""], unit)
1359                    self._setselection(savesel)
1360
1361            # list values are dict, {'value'=, 'name'=)
[1157]1362            elif isinstance(freqs[-1], dict):
[1845]1363                values = []
1364                names = []
1365                for d in freqs:
1366                    values.append(d["value"])
1367                    names.append(d["name"])
1368                self._setrestfreqs(values, names, unit)
[1819]1369            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1370                sel = selector()
1371                savesel = self._getselection()
[1322]1372                iflist = self.getifnos()
[1819]1373                if len(freqs)>len(iflist):
[1845]1374                    raise ValueError("number of elements in list of list exceeds"
1375                                     " the current IF selections")
1376                for i, fval in enumerate(freqs):
[1322]1377                    sel.set_ifs(iflist[i])
[1259]1378                    self._setselection(sel)
[1845]1379                    self._setrestfreqs(fval, [""], unit)
[1157]1380                self._setselection(savesel)
1381        # freqs are to be taken from a linecatalog
[1153]1382        elif isinstance(freqs, linecatalog):
1383            sel = selector()
1384            savesel = self._getselection()
1385            for i in xrange(freqs.nrow()):
[1322]1386                sel.set_ifs(iflist[i])
[1153]1387                self._setselection(sel)
[1845]1388                self._setrestfreqs([freqs.get_frequency(i)],
1389                                   [freqs.get_name(i)], "MHz")
[1153]1390                # ensure that we are not iterating past nIF
1391                if i == self.nif()-1: break
1392            self._setselection(savesel)
[931]1393        else:
1394            return
1395        self._add_history("set_restfreqs", varlist)
1396
[1360]1397    def shift_refpix(self, delta):
[1846]1398        """\
[1589]1399        Shift the reference pixel of the Spectral Coordinate by an
1400        integer amount.
[1846]1401
[1589]1402        Parameters:
[1846]1403
[1589]1404            delta:   the amount to shift by
[1846]1405
1406        *Note*:
1407
[1589]1408            Be careful using this with broadband data.
[1846]1409
[1360]1410        """
[1731]1411        Scantable.shift_refpix(self, delta)
[931]1412
[1862]1413    @asaplog_post_dec
[1259]1414    def history(self, filename=None):
[1846]1415        """\
[1259]1416        Print the history. Optionally to a file.
[1846]1417
[1348]1418        Parameters:
[1846]1419
[1348]1420            filename:    The name  of the file to save the history to.
[1846]1421
[1259]1422        """
[484]1423        hist = list(self._gethistory())
[794]1424        out = "-"*80
[484]1425        for h in hist:
[489]1426            if h.startswith("---"):
[1857]1427                out = "\n".join([out, h])
[489]1428            else:
1429                items = h.split("##")
1430                date = items[0]
1431                func = items[1]
1432                items = items[2:]
[794]1433                out += "\n"+date+"\n"
1434                out += "Function: %s\n  Parameters:" % (func)
[489]1435                for i in items:
1436                    s = i.split("=")
[1118]1437                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1438                out = "\n".join([out, "-"*80])
[1259]1439        if filename is not None:
1440            if filename == "":
1441                filename = 'scantable_history.txt'
1442            import os
1443            filename = os.path.expandvars(os.path.expanduser(filename))
1444            if not os.path.isdir(filename):
1445                data = open(filename, 'w')
1446                data.write(out)
1447                data.close()
1448            else:
1449                msg = "Illegal file name '%s'." % (filename)
[1859]1450                raise IOError(msg)
1451        return page(out)
[513]1452    #
1453    # Maths business
1454    #
[1862]1455    @asaplog_post_dec
[931]1456    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1457        """\
[1070]1458        Return the (time) weighted average of a scan.
[1846]1459
1460        *Note*:
1461
[1070]1462            in channels only - align if necessary
[1846]1463
[513]1464        Parameters:
[1846]1465
[513]1466            mask:     an optional mask (only used for 'var' and 'tsys'
1467                      weighting)
[1855]1468
[558]1469            scanav:   True averages each scan separately
1470                      False (default) averages all scans together,
[1855]1471
[1099]1472            weight:   Weighting scheme.
1473                      'none'     (mean no weight)
1474                      'var'      (1/var(spec) weighted)
1475                      'tsys'     (1/Tsys**2 weighted)
1476                      'tint'     (integration time weighted)
1477                      'tintsys'  (Tint/Tsys**2)
1478                      'median'   (median averaging)
[535]1479                      The default is 'tint'
[1855]1480
[931]1481            align:    align the spectra in velocity before averaging. It takes
1482                      the time of the first spectrum as reference time.
[1846]1483
1484        Example::
1485
[513]1486            # time average the scantable without using a mask
[710]1487            newscan = scan.average_time()
[1846]1488
[513]1489        """
1490        varlist = vars()
[1593]1491        weight = weight or 'TINT'
1492        mask = mask or ()
1493        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1494        scan = (self, )
[1859]1495
1496        if align:
1497            scan = (self.freq_align(insitu=False), )
1498        s = None
1499        if weight.upper() == 'MEDIAN':
1500            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1501                                                     scanav))
1502        else:
1503            s = scantable(self._math._average(scan, mask, weight.upper(),
1504                          scanav))
[1099]1505        s._add_history("average_time", varlist)
[513]1506        return s
[710]1507
[1862]1508    @asaplog_post_dec
[876]1509    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1510        """\
[513]1511        Return a scan where all spectra are converted to either
1512        Jansky or Kelvin depending upon the flux units of the scan table.
1513        By default the function tries to look the values up internally.
1514        If it can't find them (or if you want to over-ride), you must
1515        specify EITHER jyperk OR eta (and D which it will try to look up
1516        also if you don't set it). jyperk takes precedence if you set both.
[1846]1517
[513]1518        Parameters:
[1846]1519
[513]1520            jyperk:      the Jy / K conversion factor
[1855]1521
[513]1522            eta:         the aperture efficiency
[1855]1523
[513]1524            d:           the geometric diameter (metres)
[1855]1525
[513]1526            insitu:      if False a new scantable is returned.
1527                         Otherwise, the scaling is done in-situ
1528                         The default is taken from .asaprc (False)
[1846]1529
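        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the Jy/K factor is illustrative only)::

            # convert using an explicit Jy/K factor, returning a new table
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
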
[513]1530        """
1531        if insitu is None: insitu = rcParams['insitu']
[876]1532        self._math._setinsitu(insitu)
[513]1533        varlist = vars()
[1593]1534        jyperk = jyperk or -1.0
1535        d = d or -1.0
1536        eta = eta or -1.0
[876]1537        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1538        s._add_history("convert_flux", varlist)
1539        if insitu: self._assign(s)
1540        else: return s
[513]1541
[1862]1542    @asaplog_post_dec
[876]1543    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1544        """\
[513]1545        Return a scan after applying a gain-elevation correction.
1546        The correction can be made via either a polynomial or a
1547        table-based interpolation (and extrapolation if necessary).
1548        You specify polynomial coefficients, an ascii table or neither.
1549        If you specify neither, then a polynomial correction will be made
1550        with built in coefficients known for certain telescopes (an error
1551        will occur if the instrument is not known).
1552        The data and Tsys are *divided* by the scaling factors.
[1846]1553
[513]1554        Parameters:
[1846]1555
[513]1556            poly:        Polynomial coefficients (default None) to compute a
1557                         gain-elevation correction as a function of
1558                         elevation (in degrees).
[1855]1559
[513]1560            filename:    The name of an ascii file holding correction factors.
1561                         The first row of the ascii file must give the column
1562                         names and these MUST include columns
1563                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1564                         by this) somewhere.
1565                         The second row must give the data type of the
1566                         column. Use 'R' for Real and 'I' for Integer.
1567                         An example file would be
1568                         (actual factors are arbitrary) :
1569
1570                         TIME ELEVATION FACTOR
1571                         R R R
1572                         0.1 0 0.8
1573                         0.2 20 0.85
1574                         0.3 40 0.9
1575                         0.4 60 0.85
1576                         0.5 80 0.8
1577                         0.6 90 0.75
[1855]1578
[513]1579            method:      Interpolation method when correcting from a table.
1580                         Values are  "nearest", "linear" (default), "cubic"
1581                         and "spline"
[1855]1582
[513]1583            insitu:      if False a new scantable is returned.
1584                         Otherwise, the scaling is done in-situ
1585                         The default is taken from .asaprc (False)
[1846]1586
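        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the polynomial coefficients are illustrative only)::

            # apply a quadratic gain-elevation correction
            corrected = scan.gain_el(poly=[0.9, 0.002, -0.00001], insitu=False)
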
[513]1587        """
1588
1589        if insitu is None: insitu = rcParams['insitu']
[876]1590        self._math._setinsitu(insitu)
[513]1591        varlist = vars()
[1593]1592        poly = poly or ()
[513]1593        from os.path import expandvars
1594        filename = expandvars(filename)
[876]1595        s = scantable(self._math._gainel(self, poly, filename, method))
1596        s._add_history("gain_el", varlist)
[1593]1597        if insitu:
1598            self._assign(s)
1599        else:
1600            return s
[710]1601
[1862]1602    @asaplog_post_dec
[931]1603    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1604        """\
[513]1605        Return a scan where all rows have been aligned in frequency/velocity.
1606        The alignment frequency frame (e.g. LSRK) is that set by function
1607        set_freqframe.
[1846]1608
[513]1609        Parameters:
[1855]1610
[513]1611            reftime:     reference time to align at. By default, the time of
1612                         the first row of data is used.
[1855]1613
[513]1614            method:      Interpolation method for regridding the spectra.
1615                         Choose from "nearest", "linear", "cubic" (default)
1616                         and "spline"
[1855]1617
[513]1618            insitu:      if False a new scantable is returned.
1619                         Otherwise, the scaling is done in-situ
1620                         The default is taken from .asaprc (False)
[1846]1621
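        Example (a minimal usage sketch; ``scan`` is an existing scantable)::

            # align all rows to the time of the first row, returning a copy
            aligned = scan.freq_align(insitu=False)
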
[513]1622        """
[931]1623        if insitu is None: insitu = rcParams["insitu"]
[876]1624        self._math._setinsitu(insitu)
[513]1625        varlist = vars()
[1593]1626        reftime = reftime or ""
[931]1627        s = scantable(self._math._freq_align(self, reftime, method))
[876]1628        s._add_history("freq_align", varlist)
1629        if insitu: self._assign(s)
1630        else: return s
[513]1631
[1862]1632    @asaplog_post_dec
[1725]1633    def opacity(self, tau=None, insitu=None):
[1846]1634        """\
[513]1635        Apply an opacity correction. The data
1636        and Tsys are multiplied by the correction factor.
[1846]1637
[513]1638        Parameters:
[1855]1639
[1689]1640            tau:         (list of) opacity from which the correction factor is
[513]1641                         exp(tau*ZD)
[1689]1642                         where ZD is the zenith-distance.
1643                         If a list is provided, it has to be of length nIF,
1644                         nIF*nPol or 1 and in order of IF/POL, e.g.
1645                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1646                         if tau is `None` the opacities are determined from a
1647                         model.
[1855]1648
[513]1649            insitu:      if False a new scantable is returned.
1650                         Otherwise, the scaling is done in-situ
1651                         The default is taken from .asaprc (False)
[1846]1652
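        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the zenith opacity value is illustrative only)::

            # correct for an assumed zenith opacity of 0.083
            corrected = scan.opacity(0.083, insitu=False)
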
[513]1653        """
1654        if insitu is None: insitu = rcParams['insitu']
[876]1655        self._math._setinsitu(insitu)
[513]1656        varlist = vars()
[1689]1657        if not hasattr(tau, "__len__"):
1658            tau = [tau]
[876]1659        s = scantable(self._math._opacity(self, tau))
1660        s._add_history("opacity", varlist)
1661        if insitu: self._assign(s)
1662        else: return s
[513]1663
[1862]1664    @asaplog_post_dec
[513]1665    def bin(self, width=5, insitu=None):
[1846]1666        """\
[513]1667        Return a scan where all spectra have been binned up.
[1846]1668
[1348]1669        Parameters:
[1846]1670
[513]1671            width:       The bin width (default=5) in pixels
[1855]1672
[513]1673            insitu:      if False a new scantable is returned.
1674                         Otherwise, the scaling is done in-situ
1675                         The default is taken from .asaprc (False)
[1846]1676
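        Example (a minimal usage sketch; ``scan`` is an existing scantable)::

            # bin up the spectra by 10 channels, returning a new table
            binned = scan.bin(width=10, insitu=False)
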
[513]1677        """
1678        if insitu is None: insitu = rcParams['insitu']
[876]1679        self._math._setinsitu(insitu)
[513]1680        varlist = vars()
[876]1681        s = scantable(self._math._bin(self, width))
[1118]1682        s._add_history("bin", varlist)
[1589]1683        if insitu:
1684            self._assign(s)
1685        else:
1686            return s
[513]1687
[1862]1688    @asaplog_post_dec
[513]1689    def resample(self, width=5, method='cubic', insitu=None):
[1846]1690        """\
[1348]1691        Return a scan where all spectra have been resampled to a new channel width.
[1573]1692
[1348]1693        Parameters:
[1846]1694
[513]1695            width:       The bin width (default=5) in pixels
[1855]1696
[513]1697            method:      Interpolation method used when resampling.
1698                         Values are  "nearest", "linear", "cubic" (default)
1699                         and "spline"
[1855]1700
[513]1701            insitu:      if False a new scantable is returned.
1702                         Otherwise, the scaling is done in-situ
1703                         The default is taken from .asaprc (False)
[1846]1704
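        Example (a minimal usage sketch; ``scan`` is an existing scantable)::

            # resample by a factor of 4 using spline interpolation
            regridded = scan.resample(width=4, method='spline', insitu=False)
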
[513]1705        """
1706        if insitu is None: insitu = rcParams['insitu']
[876]1707        self._math._setinsitu(insitu)
[513]1708        varlist = vars()
[876]1709        s = scantable(self._math._resample(self, method, width))
[1118]1710        s._add_history("resample", varlist)
[876]1711        if insitu: self._assign(s)
1712        else: return s
[513]1713
[1862]1714    @asaplog_post_dec
[946]1715    def average_pol(self, mask=None, weight='none'):
[1846]1716        """\
[946]1717        Average the Polarisations together.
[1846]1718
[946]1719        Parameters:
[1846]1720
[946]1721            mask:        An optional mask defining the region, where the
1722                         averaging will be applied. The output will have all
1723                         specified points masked.
[1855]1724
[946]1725            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1726                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1727
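        Example (a minimal usage sketch; ``scan`` is an existing scantable)::

            # average the polarisations using Tsys weighting
            polaveraged = scan.average_pol(weight='tsys')
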
[946]1728        """
1729        varlist = vars()
[1593]1730        mask = mask or ()
[1010]1731        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1732        s._add_history("average_pol", varlist)
[992]1733        return s
[513]1734
[1862]1735    @asaplog_post_dec
[1145]1736    def average_beam(self, mask=None, weight='none'):
[1846]1737        """\
[1145]1738        Average the Beams together.
[1846]1739
[1145]1740        Parameters:
1741            mask:        An optional mask defining the region, where the
1742                         averaging will be applied. The output will have all
1743                         specified points masked.
[1855]1744
[1145]1745            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1746                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1747
[1145]1748        """
1749        varlist = vars()
[1593]1750        mask = mask or ()
[1145]1751        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1752        s._add_history("average_beam", varlist)
1753        return s
1754
[1586]1755    def parallactify(self, pflag):
[1846]1756        """\
[1843]1757        Set a flag to indicate whether this data should be treated as having
[1617]1758        been 'parallactified' (total phase == 0.0)
[1846]1759
[1617]1760        Parameters:
[1855]1761
[1843]1762            pflag:  Bool indicating whether to turn this on (True) or
[1617]1763                    off (False)
[1846]1764
[1617]1765        """
[1586]1766        varlist = vars()
1767        self._parallactify(pflag)
1768        self._add_history("parallactify", varlist)
1769
[1862]1770    @asaplog_post_dec
[992]1771    def convert_pol(self, poltype=None):
[1846]1772        """\
[992]1773        Convert the data to a different polarisation type.
[1565]1774        Note that you will need cross-polarisation terms for most conversions.
[1846]1775
[992]1776        Parameters:
[1855]1777
[992]1778            poltype:    The new polarisation type. Valid types are:
[1565]1779                        "linear", "circular", "stokes" and "linpol"
[1846]1780
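        Example (a minimal usage sketch; ``scan`` is an existing scantable
        that contains the required cross-polarisation products)::

            # convert linears to Stokes parameters
            stokes = scan.convert_pol('stokes')
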
[992]1781        """
1782        varlist = vars()
[1859]1783        s = scantable(self._math._convertpol(self, poltype))
[1118]1784        s._add_history("convert_pol", varlist)
[992]1785        return s
1786
[1862]1787    @asaplog_post_dec
[1819]1788    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]1789        """\
[513]1790        Smooth the spectrum by the specified kernel (conserving flux).
[1846]1791
[513]1792        Parameters:
[1846]1793
[513]1794            kernel:     The type of smoothing kernel. Select from
[1574]1795                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1796                        or 'poly'
[1855]1797
[513]1798            width:      The width of the kernel in pixels. For hanning this is
1799                        ignored otherwise it defauls to 5 pixels.
1800                        For 'gaussian' it is the Full Width Half
1801                        Maximum. For 'boxcar' it is the full width.
[1574]1802                        For 'rmedian' and 'poly' it is the half width.
[1855]1803
[1574]1804            order:      Optional parameter for 'poly' kernel (default is 2), to
1805                        specify the order of the polnomial. Ignored by all other
1806                        kernels.
[1855]1807
[1819]1808            plot:       plot the original and the smoothed spectra.
1809                        In this each indivual fit has to be approved, by
1810                        typing 'y' or 'n'
[1855]1811
[513]1812            insitu:     if False a new scantable is returned.
1813                        Otherwise, the scaling is done in-situ
1814                        The default is taken from .asaprc (False)
[1846]1815
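        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the kernel width is illustrative only)::

            # gaussian smoothing with a FWHM of 10 channels
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
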
[513]1816        """
1817        if insitu is None: insitu = rcParams['insitu']
[876]1818        self._math._setinsitu(insitu)
[513]1819        varlist = vars()
[1819]1820
1821        if plot: orgscan = self.copy()
1822
[1574]1823        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1824        s._add_history("smooth", varlist)
[1819]1825
1826        if plot:
1827            if rcParams['plotter.gui']:
1828                from asap.asaplotgui import asaplotgui as asaplot
1829            else:
1830                from asap.asaplot import asaplot
1831            self._p=asaplot()
1832            self._p.set_panels()
1833            ylab=s._get_ordinate_label()
1834            #self._p.palette(0,["#777777","red"])
1835            for r in xrange(s.nrow()):
1836                xsm=s._getabcissa(r)
1837                ysm=s._getspectrum(r)
1838                xorg=orgscan._getabcissa(r)
1839                yorg=orgscan._getspectrum(r)
1840                self._p.clear()
1841                self._p.hold()
1842                self._p.set_axes('ylabel',ylab)
1843                self._p.set_axes('xlabel',s._getabcissalabel(r))
1844                self._p.set_axes('title',s._getsourcename(r))
1845                self._p.set_line(label='Original',color="#777777")
1846                self._p.plot(xorg,yorg)
1847                self._p.set_line(label='Smoothed',color="red")
1848                self._p.plot(xsm,ysm)
1849                ### Ugly part for legend
1850                for i in [0,1]:
1851                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1852                self._p.release()
1853                ### Ugly part for legend
1854                self._p.subplots[0]['lines']=[]
1855                res = raw_input("Accept smoothing ([y]/n): ")
1856                if res.upper() == 'N':
1857                    s._setspectrum(yorg, r)
1858            self._p.unmap()
1859            self._p = None
1860            del orgscan
1861
[876]1862        if insitu: self._assign(s)
1863        else: return s
[513]1864
[1862]1865    @asaplog_post_dec
[1907]1866    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
[1846]1867        """\
[513]1868        Return a scan which has been baselined (all rows) by a polynomial.
[1907]1869       
[513]1870        Parameters:
[1846]1871
[794]1872            mask:       an optional mask
[1855]1873
[794]1874            order:      the order of the polynomial (default is 0)
[1855]1875
[1061]1876            plot:       plot the fit and the residual. In this mode each
1877                        individual fit has to be approved by typing 'y'
1878                        or 'n'
[1855]1879
[1391]1880            uselin:     use linear polynomial fit
[1855]1881
[794]1882            insitu:     if False a new scantable is returned.
1883                        Otherwise, the scaling is done in-situ
1884                        The default is taken from .asaprc (False)
[1846]1885
[1907]1886            rows:       row numbers of spectra to be processed.
1887                        (default is None: for all rows)
1888       
1889        Example::

[513]1890            # return a scan baselined by a third order polynomial,
1891            # not using a mask
1892            bscan = scan.old_poly_baseline(order=3)
[1846]1893
[579]1894        """
[513]1895        if insitu is None: insitu = rcParams['insitu']
[1819]1896        if not insitu:
1897            workscan = self.copy()
1898        else:
1899            workscan = self
[513]1900        varlist = vars()
1901        if mask is None:
[1907]1902            mask = [True for i in xrange(self.nchan())]
[1819]1903
[1217]1904        try:
1905            f = fitter()
[1391]1906            if uselin:
1907                f.set_function(lpoly=order)
1908            else:
1909                f.set_function(poly=order)
[1819]1910
[1907]1911            if rows is None:
1912                rows = xrange(workscan.nrow())
1913            elif isinstance(rows, int):
1914                rows = [ rows ]
1915           
[1819]1916            if len(rows) > 0:
1917                self.blpars = []
[1907]1918                self.masklists = []
1919                self.actualmask = []
1920           
[1819]1921            for r in rows:
1922                f.x = workscan._getabcissa(r)
1923                f.y = workscan._getspectrum(r)
[1907]1924                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]1925                f.data = None
1926                f.fit()
1927                if plot:
1928                    f.plot(residual=True)
1929                    x = raw_input("Accept fit ( [y]/n ): ")
1930                    if x.upper() == 'N':
1931                        self.blpars.append(None)
[1907]1932                        self.masklists.append(None)
1933                        self.actualmask.append(None)
[1819]1934                        continue
1935                workscan._setspectrum(f.fitter.getresidual(), r)
1936                self.blpars.append(f.get_parameters())
[1924]1937                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
[1907]1938                self.actualmask.append(f.mask)
[1819]1939
1940            if plot:
1941                f._p.unmap()
1942                f._p = None
1943            workscan._add_history("poly_baseline", varlist)
[1856]1944            if insitu:
1945                self._assign(workscan)
1946            else:
1947                return workscan
[1217]1948        except RuntimeError:
1949            msg = "The fit failed, possibly because it didn't converge."
[1859]1950            raise RuntimeError(msg)
[513]1951
[1924]1952    @asaplog_post_dec
1953    def poly_baseline(self, mask=None, order=0, plot=False, batch=False,
1954                      insitu=None, rows=None):
[1907]1955        """\
1956        Return a scan which has been baselined (all rows) by a polynomial.
1957        Parameters:
1958            mask:       an optional mask
1959            order:      the order of the polynomial (default is 0)
1960            plot:       plot the fit and the residual. In this each
1961                        individual fit has to be approved by typing 'y'
1962                        or 'n'. Ignored if batch = True.
1963            batch:      if True a faster algorithm is used and logs
1964                        including the fit results are not output
1965                        (default is False)
1966            insitu:     if False a new scantable is returned.
1967                        Otherwise, the scaling is done in-situ
1968                        The default is taken from .asaprc (False)
1969            rows:       row numbers of spectra to be processed.
1970                        (default is None: for all rows)
1971        Example::

1972            # return a scan baselined by a third order polynomial,
1973            # not using a mask
1974            bscan = scan.poly_baseline(order=3)
1975        """
1976        if insitu is None: insitu = rcParams["insitu"]
[1924]1977        varlist = vars()
[1907]1978        if insitu:
1979            workscan = self
1980        else:
1981            workscan = self.copy()
1982
1983        nchan = workscan.nchan()
1984       
1985        if mask is None:
1986            mask = [True for i in xrange(nchan)]
1987
1988        try:
1989            if rows == None:
1990                rows = xrange(workscan.nrow())
1991            elif isinstance(rows, int):
1992                rows = [ rows ]
1993           
1994            if len(rows) > 0:
[1924]1995                workscan.blpars = []
1996                workscan.masklists = []
1997                workscan.actualmask = []
[1907]1998
1999            if batch:
[1924]2000                workscan._poly_baseline_batch(mask, order)
[1907]2001            elif plot:
2002                f = fitter()
2003                f.set_function(lpoly=order)
2004                for r in rows:
2005                    f.x = workscan._getabcissa(r)
2006                    f.y = workscan._getspectrum(r)
2007                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2008                    f.data = None
2009                    f.fit()
2010                   
2011                    f.plot(residual=True)
2012                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2013                    if accept_fit.upper() == "N":
2014                        workscan.blpars.append(None)
2015                        workscan.masklists.append(None)
2016                        workscan.actualmask.append(None)
2017                        continue
2018                    workscan._setspectrum(f.fitter.getresidual(), r)
[1924]2019                    workscan.blpars.append(f.get_parameters())
2020                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2021                    workscan.actualmask.append(f.mask)
[1907]2022                   
2023                f._p.unmap()
2024                f._p = None
2025            else:
2026                for r in rows:
[1924]2027                    fitparams = workscan._poly_baseline(mask, order, r)
2028                    params = fitparams.getparameters()
2029                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2030                    errors = fitparams.geterrors()
2031                    fmask = mask_and(mask, workscan._getmask(r))
2032
2033                    workscan.blpars.append({"params":params,
2034                                            "fixed": fitparams.getfixedparameters(),
2035                                            "formatted":fmtd, "errors":errors})
2036                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2037                    workscan.actualmask.append(fmask)
[1907]2038                   
[1924]2039                    asaplog.push(fmtd)
[1907]2040           
2041            workscan._add_history("poly_baseline", varlist)
2042           
2043            if insitu:
2044                self._assign(workscan)
2045            else:
2046                return workscan
2047           
[1919]2048        except RuntimeError, e:
[1907]2049            msg = "The fit failed, possibly because it didn't converge."
2050            if rcParams["verbose"]:
[1919]2051                asaplog.push(str(e))
[1907]2052                asaplog.push(str(msg))
2053                return
2054            else:
[1919]2055                raise RuntimeError(str(e)+'\n'+msg)
[1907]2056
2057
2058    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2059                           threshold=3, chan_avg_limit=1, plot=False,
[1907]2060                           insitu=None, rows=None):
[1846]2061        """\
[880]2062        Return a scan which has been baselined (all rows) by a polynomial.
2063        Spectral lines are detected first using linefinder and masked out
2064        to avoid them affecting the baseline solution.
2065
2066        Parameters:
[1846]2067
[880]2068            mask:       an optional mask retrieved from scantable
[1846]2069
2070            edge:       an optional number of channels to drop at the edge of
2071                        the spectrum. If only one value is
[880]2072                        specified, the same number will be dropped from
2073                        both sides of the spectrum. Default is to keep
[907]2074                        all channels. Nested tuples represent individual
[976]2075                        edge selection for different IFs (a number of spectral
2076                        channels can be different)
[1846]2077
[880]2078            order:      the order of the polynomial (default is 0)
[1846]2079
[880]2080            threshold:  the threshold used by line finder. It is better to
2081                        keep it large as only strong lines affect the
2082                        baseline solution.
[1846]2083
[1280]2084            chan_avg_limit:
2085                        a maximum number of consecutive spectral channels to
2086                        average during the search of weak and broad lines.
2087                        The default is no averaging (and no search for weak
2088                        lines). If such lines can affect the fitted baseline
2089                        (e.g. a high order polynomial is fitted), increase this
2090                        parameter (usually values up to 8 are reasonable). Most
2091                        users of this method should find the default value
2092                        sufficient.
[1846]2093
[1061]2094            plot:       plot the fit and the residual. In this mode each
2095                        individual fit has to be approved by typing 'y'
2096                        or 'n'
[1846]2097
[880]2098            insitu:     if False a new scantable is returned.
2099                        Otherwise, the scaling is done in-situ
2100                        The default is taken from .asaprc (False)
[1907]2101            rows:       row numbers of spectra to be processed.
2102                        (default is None: for all rows)
[880]2103
[1846]2104
2105        Example::
2106
2107            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2108
[880]2109        """
2110        if insitu is None: insitu = rcParams['insitu']
2111        varlist = vars()
2112        from asap.asaplinefind import linefinder
2113        from asap import _is_sequence_or_number as _is_valid
2114
[976]2115        # check whether edge is set up for each IF individually
[1118]2116        individualedge = False;
2117        if len(edge) > 1:
2118            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2119                individualedge = True;
[907]2120
[1118]2121        if not _is_valid(edge, int) and not individualedge:
[909]2122            raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2123            pair of integers specified as a tuple. Nested tuples are allowed \
2124            to make individual selection for different IFs."
[919]2125
[1118]2126        curedge = (0, 0)
2127        if individualedge:
2128            for edgepar in edge:
2129                if not _is_valid(edgepar, int):
2130                    raise ValueError, "Each element of the 'edge' tuple has \
2131                                       to be a pair of integers or an integer."
[907]2132        else:
[1118]2133            curedge = edge;
[880]2134
[1907]2135        if not insitu:
2136            workscan = self.copy()
2137        else:
2138            workscan = self
2139
[880]2140        # setup fitter
2141        f = fitter()
[1907]2142        f.set_function(lpoly=order)
[880]2143
2144        # setup line finder
[1118]2145        fl = linefinder()
[1268]2146        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2147
[907]2148        fl.set_scan(workscan)
2149
[1907]2150        if mask is None:
2151            mask = _n_bools(workscan.nchan(), True)
2152       
2153        if rows is None:
2154            rows = xrange(workscan.nrow())
2155        elif isinstance(rows, int):
2156            rows = [ rows ]
2157       
[1819]2158        # Save parameters of baseline fits & masklists as a class attribute.
2159        # NOTICE: It does not reflect changes in scantable!
2160        if len(rows) > 0:
2161            self.blpars=[]
2162            self.masklists=[]
[1907]2163            self.actualmask=[]
[880]2164        asaplog.push("Processing:")
2165        for r in rows:
[1118]2166            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2167                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2168                 workscan.getpol(r), workscan.getcycle(r))
[880]2169            asaplog.push(msg, False)
[907]2170
[976]2171            # figure out edge parameter
[1118]2172            if individualedge:
2173                if len(edge) <= workscan.getif(r):
2174                    raise RuntimeError, "Number of edge elements appears to " \
2175                                        "be less than the number of IFs"
2176                curedge = edge[workscan.getif(r)]
[919]2177
[1907]2178            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]2179
[976]2180            # setup line finder
[1819]2181            fl.find_lines(r, actualmask, curedge)
[1907]2182           
[1819]2183            f.x = workscan._getabcissa(r)
2184            f.y = workscan._getspectrum(r)
[1907]2185            f.mask = fl.get_mask()
[1819]2186            f.data = None
[880]2187            f.fit()
[1819]2188
2189            # Show mask list
[1924]2190            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2191            msg = "mask range: "+str(masklist)
2192            asaplog.push(msg, False)
2193
[1061]2194            if plot:
2195                f.plot(residual=True)
2196                x = raw_input("Accept fit ( [y]/n ): ")
2197                if x.upper() == 'N':
[1819]2198                    self.blpars.append(None)
2199                    self.masklists.append(None)
[1907]2200                    self.actualmask.append(None)
[1061]2201                    continue
[1819]2202
[880]2203            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2204            self.blpars.append(f.get_parameters())
2205            self.masklists.append(masklist)
[1907]2206            self.actualmask.append(f.mask)
[1061]2207        if plot:
2208            f._p.unmap()
2209            f._p = None
2210        workscan._add_history("auto_poly_baseline", varlist)
[880]2211        if insitu:
2212            self._assign(workscan)
2213        else:
2214            return workscan
2215
[1862]2216    @asaplog_post_dec
[914]2217    def rotate_linpolphase(self, angle):
[1846]2218        """\
[914]2219        Rotate the phase of the complex polarization O=Q+iU correlation.
2220        This is always done in situ in the raw data.  So if you call this
2221        function more than once then each call rotates the phase further.
[1846]2222
[914]2223        Parameters:
[1846]2224
[914]2225            angle:   The angle (degrees) to rotate (add) by.
[1846]2226
2227        Example::
2228
[914]2229            scan.rotate_linpolphase(2.3)
[1846]2230
[914]2231        """
2232        varlist = vars()
[936]2233        self._math._rotate_linpolphase(self, angle)
[914]2234        self._add_history("rotate_linpolphase", varlist)
2235        return
[710]2236
[1862]2237    @asaplog_post_dec
[914]2238    def rotate_xyphase(self, angle):
[1846]2239        """\
[914]2240        Rotate the phase of the XY correlation.  This is always done in situ
2241        in the data.  So if you call this function more than once
2242        then each call rotates the phase further.
[1846]2243
[914]2244        Parameters:
[1846]2245
[914]2246            angle:   The angle (degrees) to rotate (add) by.
[1846]2247
2248        Example::
2249
[914]2250            scan.rotate_xyphase(2.3)
[1846]2251
[914]2252        """
2253        varlist = vars()
[936]2254        self._math._rotate_xyphase(self, angle)
[914]2255        self._add_history("rotate_xyphase", varlist)
2256        return
2257
[1862]2258    @asaplog_post_dec
[914]2259    def swap_linears(self):
[1846]2260        """\
[1573]2261        Swap the linear polarisations XX and YY, or more generally the first
[1348]2262        two polarisations, as this also works for circulars.
[914]2263        """
2264        varlist = vars()
[936]2265        self._math._swap_linears(self)
[914]2266        self._add_history("swap_linears", varlist)
2267        return
2268
[1862]2269    @asaplog_post_dec
[914]2270    def invert_phase(self):
[1846]2271        """\
[914]2272        Invert the phase of the complex polarisation
2273        """
2274        varlist = vars()
[936]2275        self._math._invert_phase(self)
[914]2276        self._add_history("invert_phase", varlist)
2277        return
2278
[1862]2279    @asaplog_post_dec
[876]2280    def add(self, offset, insitu=None):
[1846]2281        """\
[513]2282        Return a scan where all spectra have the offset added
[1846]2283
[513]2284        Parameters:
[1846]2285
[513]2286            offset:      the offset
[1855]2287
[513]2288            insitu:      if False a new scantable is returned.
2289                         Otherwise, the scaling is done in-situ
2290                         The default is taken from .asaprc (False)
[1846]2291
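        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the offset is illustrative only)::

            # add a constant offset of 0.05 to every spectrum
            shifted = scan.add(0.05, insitu=False)
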
[513]2292        """
2293        if insitu is None: insitu = rcParams['insitu']
[876]2294        self._math._setinsitu(insitu)
[513]2295        varlist = vars()
[876]2296        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2297        s._add_history("add", varlist)
[876]2298        if insitu:
2299            self._assign(s)
2300        else:
[513]2301            return s
2302
[1862]2303    @asaplog_post_dec
[1308]2304    def scale(self, factor, tsys=True, insitu=None):
[1846]2305        """\
2306
[513]2307        Return a scan where all spectra are scaled by the give 'factor'
[1846]2308
[513]2309        Parameters:
[1846]2310
[1819]2311            factor:      the scaling factor (float or 1D float list)
[1855]2312
[513]2313            insitu:      if False a new scantable is returned.
2314                         Otherwise, the scaling is done in-situ
2315                         The default is taken from .asaprc (False)
[1855]2316
[513]2317            tsys:        if True (default) then apply the operation to Tsys
2318                         as well as the data
[1846]2319
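        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the factor is illustrative only)::

            # scale the spectra (but not Tsys) by 1.3
            scaled = scan.scale(1.3, tsys=False, insitu=False)
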
[513]2320        """
2321        if insitu is None: insitu = rcParams['insitu']
[876]2322        self._math._setinsitu(insitu)
[513]2323        varlist = vars()
[1819]2324        s = None
2325        import numpy
2326        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2327            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2328                from asapmath import _array2dOp
2329                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2330            else:
2331                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2332        else:
2333            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2334        s._add_history("scale", varlist)
[876]2335        if insitu:
2336            self._assign(s)
2337        else:
[513]2338            return s
2339
[1504]2340    def set_sourcetype(self, match, matchtype="pattern",
2341                       sourcetype="reference"):
[1846]2342        """\
[1502]2343        Set the type of the source to be a source or reference scan
[1846]2344        using the provided pattern.
2345
[1502]2346        Parameters:
[1846]2347
[1504]2348            match:          a Unix style pattern, regular expression or selector
[1855]2349
[1504]2350            matchtype:      'pattern' (default) UNIX style pattern or
2351                            'regex' regular expression
[1855]2352
[1502]2353            sourcetype:     the type of the source to use (source/reference)
[1846]2354
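        Example (a minimal usage sketch; ``scan`` is an existing scantable
        and the pattern is illustrative only)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
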
[1502]2355        """
2356        varlist = vars()
2357        basesel = self.get_selection()
2358        stype = -1
2359        if sourcetype.lower().startswith("r"):
2360            stype = 1
2361        elif sourcetype.lower().startswith("s"):
2362            stype = 0
[1504]2363        else:
[1502]2364            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2365        if matchtype.lower().startswith("p"):
2366            matchtype = "pattern"
2367        elif matchtype.lower().startswith("r"):
2368            matchtype = "regex"
2369        else:
2370            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2371        sel = selector()
2372        if isinstance(match, selector):
2373            sel = match
2374        else:
[1504]2375            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2376        self.set_selection(basesel+sel)
2377        self._setsourcetype(stype)
2378        self.set_selection(basesel)
[1573]2379        self._add_history("set_sourcetype", varlist)
[1502]2380
[1862]2381    @asaplog_post_dec
[1857]2382    @preserve_selection
[1819]2383    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2384        """\
[670]2385        This function allows you to build quotients automatically.
[1819]2386        It assumes the observation has the same number of
[670]2387        "ons" and "offs".
[1846]2388
[670]2389        Parameters:
[1846]2390
[710]2391            preserve:       you can preserve (default) the continuum or
2392                            remove it.  The equations used are
[1857]2393
[670]2394                            preserve: Output = Toff * (on/off) - Toff
[1857]2395
[1070]2396                            remove:   Output = Toff * (on/off) - Ton
[1855]2397
[1573]2398            mode:           the on/off detection mode
[1348]2399                            'paired' (default)
2400                            identifies 'off' scans by the
2401                            trailing '_R' (Mopra/Parkes) or
2402                            '_e'/'_w' (Tid) and matches
2403                            on/off pairs from the observing pattern
[1502]2404                            'time'
2405                            finds the closest off in time
[1348]2406
[1857]2407        .. todo:: verify argument is not implemented
2408
[670]2409        """
[1857]2410        varlist = vars()
[1348]2411        modes = ["time", "paired"]
[670]2412        if not mode in modes:
[876]2413            msg = "please provide valid mode. Valid modes are %s" % (modes)
2414            raise ValueError(msg)
[1348]2415        s = None
2416        if mode.lower() == "paired":
[1857]2417            sel = self.get_selection()
[1875]2418            sel.set_query("SRCTYPE==psoff")
[1356]2419            self.set_selection(sel)
[1348]2420            offs = self.copy()
[1875]2421            sel.set_query("SRCTYPE==pson")
[1356]2422            self.set_selection(sel)
[1348]2423            ons = self.copy()
2424            s = scantable(self._math._quotient(ons, offs, preserve))
2425        elif mode.lower() == "time":
2426            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2427        s._add_history("auto_quotient", varlist)
[876]2428        return s
[710]2429
[1862]2430    @asaplog_post_dec
[1145]2431    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2432        """\
[1143]2433        Form a quotient using "off" beams when observing in "MX" mode.
[1846]2434
[1143]2435        Parameters:
[1846]2436
[1145]2437            mask:           an optional mask to be used when weight == 'stddev'
[1855]2438
[1143]2439            weight:         How to average the off beams.  Default is 'median'.
[1855]2440
[1145]2441            preserve:       you can preserve (default) the continuum or
[1855]2442                            remove it.  The equations used are:
[1846]2443
[1855]2444                                preserve: Output = Toff * (on/off) - Toff
2445
2446                                remove:   Output = Toff * (on/off) - Ton
2447
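        Example (a minimal usage sketch; ``scan`` is an existing scantable
        taken in "MX" mode)::

            # form the quotient using the median of the off beams
            q = scan.mx_quotient(weight='median', preserve=True)
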
[1217]2448        """
[1593]2449        mask = mask or ()
[1141]2450        varlist = vars()
2451        on = scantable(self._math._mx_extract(self, 'on'))
[1143]2452        preoff = scantable(self._math._mx_extract(self, 'off'))
2453        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2454        from asapmath  import quotient
[1145]2455        q = quotient(on, off, preserve)
[1143]2456        q._add_history("mx_quotient", varlist)
[1217]2457        return q
[513]2458
[1862]2459    @asaplog_post_dec
[718]2460    def freq_switch(self, insitu=None):
[1846]2461        """\
[718]2462        Apply frequency switching to the data.
[1846]2463
[718]2464        Parameters:
[1846]2465
[718]2466            insitu:      if False a new scantable is returned.
2467                         Otherwise, the switching is done in-situ
2468                         The default is taken from .asaprc (False)
[1846]2469
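        Example (a minimal usage sketch; ``scan`` is an existing
        frequency-switched scantable)::

            # apply frequency switching, returning a new table
            fq = scan.freq_switch(insitu=False)
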
[718]2470        """
2471        if insitu is None: insitu = rcParams['insitu']
[876]2472        self._math._setinsitu(insitu)
[718]2473        varlist = vars()
[876]2474        s = scantable(self._math._freqswitch(self))
[1118]2475        s._add_history("freq_switch", varlist)
[1856]2476        if insitu:
2477            self._assign(s)
2478        else:
2479            return s
[718]2480
[1862]2481    @asaplog_post_dec
[780]2482    def recalc_azel(self):
[1846]2483        """Recalculate the azimuth and elevation for each position."""
[780]2484        varlist = vars()
[876]2485        self._recalcazel()
[780]2486        self._add_history("recalc_azel", varlist)
2487        return
2488
[1862]2489    @asaplog_post_dec
[513]2490    def __add__(self, other):
2491        varlist = vars()
2492        s = None
2493        if isinstance(other, scantable):
[1573]2494            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2495        elif isinstance(other, float):
[876]2496            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2497        else:
[718]2498            raise TypeError("Other input is not a scantable or float value")
[513]2499        s._add_history("operator +", varlist)
2500        return s
2501
[1862]2502    @asaplog_post_dec
[513]2503    def __sub__(self, other):
2504        """
2505        implicit on all axes and on Tsys
2506        """
2507        varlist = vars()
2508        s = None
2509        if isinstance(other, scantable):
[1588]2510            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2511        elif isinstance(other, float):
[876]2512            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2513        else:
[718]2514            raise TypeError("Other input is not a scantable or float value")
[513]2515        s._add_history("operator -", varlist)
2516        return s
[710]2517
[1862]2518    @asaplog_post_dec
[513]2519    def __mul__(self, other):
2520        """
2521        implicit on all axes and on Tsys
2522        """
2523        varlist = vars()
2524        s = None
2525        if isinstance(other, scantable):
[1588]2526            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2527        elif isinstance(other, float):
[876]2528            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2529        else:
[718]2530            raise TypeError("Other input is not a scantable or float value")
[513]2531        s._add_history("operator *", varlist)
2532        return s
2533
[710]2534
[1862]2535    @asaplog_post_dec
[513]2536    def __div__(self, other):
2537        """
2538        implicit on all axes and on Tsys
2539        """
2540        varlist = vars()
2541        s = None
2542        if isinstance(other, scantable):
[1589]2543            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2544        elif isinstance(other, float):
2545            if other == 0.0:
[718]2546                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2547            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2548        else:
[718]2549            raise TypeError("Other input is not a scantable or float value")
[513]2550        s._add_history("operator /", varlist)
2551        return s
2552
[1862]2553    @asaplog_post_dec
[530]2554    def get_fit(self, row=0):
[1846]2555        """\
[530]2556        Print or return the stored fits for a row in the scantable
[1846]2557
[530]2558        Parameters:
[1846]2559
[530]2560            row:    the row which the fit has been applied to.
[1846]2561
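        Example (a minimal usage sketch; ``scan`` is an existing scantable
        on which a fit has previously been stored)::

            # retrieve the stored fit for the first row as a dictionary
            fitinfo = scan.get_fit(row=0)
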
[530]2562        """
2563        if row > self.nrow():
2564            return
[976]2565        from asap.asapfit import asapfit
[530]2566        fit = asapfit(self._getfit(row))
[1859]2567        asaplog.push( '%s' %(fit) )
2568        return fit.as_dict()
[530]2569
[1483]2570    def flag_nans(self):
[1846]2571        """\
[1483]2572        Utility function to flag NaN values in the scantable.
2573        """
2574        import numpy
2575        basesel = self.get_selection()
2576        for i in range(self.nrow()):
[1589]2577            sel = self.get_row_selector(i)
2578            self.set_selection(basesel+sel)
[1483]2579            nans = numpy.isnan(self._getspectrum(0))
2580            if numpy.any(nans):
2581                bnans = [ bool(v) for v in nans]
2582                self.flag(bnans)
2583        self.set_selection(basesel)
2584
[1588]2585    def get_row_selector(self, rowno):
2586        return selector(beams=self.getbeam(rowno),
2587                        ifs=self.getif(rowno),
2588                        pols=self.getpol(rowno),
2589                        scans=self.getscan(rowno),
2590                        cycles=self.getcycle(rowno))
[1573]2591
[484]2592    def _add_history(self, funcname, parameters):
[1435]2593        if not rcParams['scantable.history']:
2594            return
[484]2595        # create date
2596        sep = "##"
2597        from datetime import datetime
2598        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2599        hist = dstr+sep
2600        hist += funcname+sep#cdate+sep
2601        if parameters.has_key('self'): del parameters['self']
[1118]2602        for k, v in parameters.iteritems():
[484]2603            if type(v) is dict:
[1118]2604                for k2, v2 in v.iteritems():
[484]2605                    hist += k2
2606                    hist += "="
[1118]2607                    if isinstance(v2, scantable):
[484]2608                        hist += 'scantable'
2609                    elif k2 == 'mask':
[1118]2610                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]2611                            hist += str(self._zip_mask(v2))
2612                        else:
2613                            hist += str(v2)
[484]2614                    else:
[513]2615                        hist += str(v2)
[484]2616            else:
2617                hist += k
2618                hist += "="
[1118]2619                if isinstance(v, scantable):
[484]2620                    hist += 'scantable'
2621                elif k == 'mask':
[1118]2622                    if isinstance(v, list) or isinstance(v, tuple):
[513]2623                        hist += str(self._zip_mask(v))
2624                    else:
2625                        hist += str(v)
[484]2626                else:
2627                    hist += str(v)
2628            hist += sep
2629        hist = hist[:-2] # remove trailing '##'
2630        self._addhistory(hist)
2631
[710]2632
[484]2633    def _zip_mask(self, mask):
2634        mask = list(mask)
2635        i = 0
2636        segments = []
2637        while mask[i:].count(1):
2638            i += mask[i:].index(1)
2639            if mask[i:].count(0):
2640                j = i + mask[i:].index(0)
2641            else:
[710]2642                j = len(mask)
[1118]2643            segments.append([i, j])
[710]2644            i = j
[484]2645        return segments
[714]2646
[626]2647    def _get_ordinate_label(self):
2648        fu = "("+self.get_fluxunit()+")"
2649        import re
2650        lbl = "Intensity"
[1118]2651        if re.match(".K.", fu):
[626]2652            lbl = "Brightness Temperature "+ fu
[1118]2653        elif re.match(".Jy.", fu):
[626]2654            lbl = "Flux density "+ fu
2655        return lbl
[710]2656
[876]2657    def _check_ifs(self):
2658        nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2659        nchans = filter(lambda t: t > 0, nchans)
[876]2660        return (sum(nchans)/len(nchans) == nchans[0])
[976]2661
[1862]2662    @asaplog_post_dec
[1916]2663    #def _fill(self, names, unit, average, getpt, antenna):
2664    def _fill(self, names, unit, average, opts={}):
[976]2665        first = True
2666        fullnames = []
2667        for name in names:
2668            name = os.path.expandvars(name)
2669            name = os.path.expanduser(name)
2670            if not os.path.exists(name):
2671                msg = "File '%s' does not exist" % (name)
2672                raise IOError(msg)
2673            fullnames.append(name)
2674        if average:
2675            asaplog.push('Auto averaging integrations')
[1079]2676        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2677        for name in fullnames:
[1073]2678            tbl = Scantable(stype)
[1843]2679            r = filler(tbl)
[1504]2680            rx = rcParams['scantable.reference']
[1843]2681            r.setreferenceexpr(rx)
[976]2682            msg = "Importing %s..." % (name)
[1118]2683            asaplog.push(msg, False)
[1916]2684            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2685            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2686            r.fill()
[976]2687            if average:
[1118]2688                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2689            if not first:
2690                tbl = self._math._merge([self, tbl])
2691            Scantable.__init__(self, tbl)
[1843]2692            r.close()
[1118]2693            del r, tbl
[976]2694            first = False
[1861]2695            #flush log
2696        asaplog.post()
[976]2697        if unit is not None:
2698            self.set_fluxunit(unit)
[1824]2699        if not is_casapy():
2700            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2701
[1402]2702    def __getitem__(self, key):
2703        if key < 0:
2704            key += self.nrow()
2705        if key >= self.nrow():
2706            raise IndexError("Row index out of range.")
2707        return self._getspectrum(key)
2708
2709    def __setitem__(self, key, value):
2710        if key < 0:
2711            key += self.nrow()
2712        if key >= self.nrow():
2713            raise IndexError("Row index out of range.")
2714        if not hasattr(value, "__len__") or \
2715                len(value) > self.nchan(self.getif(key)):
2716            raise ValueError("Spectrum length doesn't match.")
2717        return self._setspectrum(value, key)
2718
2719    def __len__(self):
2720        return self.nrow()
2721
2722    def __iter__(self):
2723        for i in range(len(self)):
2724            yield self[i]