source: trunk/python/scantable.py @ 1938

Last change on this file since 1938 was 1938, checked in by Malte Marquarding, 14 years ago

merge from branches/asap4casa3.1.0. Should be done the other way

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.3 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]18from asap.asapfitter import fitter
[102]19
[1689]20
21def preserve_selection(func):
[1691]22    @wraps_dec(func)
[1689]23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
[1857]25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
[1689]29        return val
30    return wrap
31
[1846]32def is_scantable(filename):
33    """Is the given file a scantable?
[1689]34
[1846]35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
[1883]40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
[1697]56
[1883]57def is_ms(filename):
58    """Is the given file a MeasurementSet?
[1697]59
[1883]60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
[876]78class scantable(Scantable):
[1846]79    """\
80        The ASAP container for scans (single-dish data).
[102]81    """
[1819]82
[1862]83    @asaplog_post_dec
[1916]84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]87        """\
[102]88        Create a scantable from a saved one or make a reference
[1846]89
[102]90        Parameters:
[1846]91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations withinb a scan on read.
100                          The default (True) is taken from .asaprc.
101
[484]102            unit:         brightness unit; must be consistent with K or Jy.
[1846]103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which can make loading
110                          the MS data faster in some cases.
111
[1920]112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
[1846]114
115            parallactify: Indicate that the data had been parallatified. Default
116                          is taken from rc file.
117
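        Example (a minimal usage sketch; the file name is illustrative)::

            s = scantable('myobs.rpf', average=True, unit='K')
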
[710]118        """
[976]119        if average is None:
[710]120            average = rcParams['scantable.autoaverage']
[1916]121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
[1593]141        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]142        varlist = vars()
[876]143        from asap._asap import stmath
[1819]144        self._math = stmath( rcParams['insitu'] )
[876]145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
[181]147        else:
[1697]148            if isinstance(filename, str):
[976]149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
[1697]154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
[1819]159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
[1916]164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
[1893]172                elif os.path.isfile(filename):
[1916]173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
[1883]175                else:
[1819]176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
[1859]178                    raise IOError(msg)
[1118]179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]180                  and isinstance(filename[-1], str):
[1916]181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
[1586]183        self.parallactify(parallactify)
[1259]184        self._add_history("scantable", varlist)
[102]185
[1862]186    @asaplog_post_dec
[876]187    def save(self, name=None, format=None, overwrite=False):
[1846]188        """\
[1280]189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
[1846]191
[116]192        Parameters:
[1846]193
[1093]194            name:        the name of the output file. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
[497]196                         and header in 'name'_header.txt)
[1855]197
[116]198            format:      an optional file format. Default is ASAP.
[1855]199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by CLASS)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
[411]208            overwrite:   whether the file should be overwritten if it exists.
[256]209                         The default (False) is to return with a warning
[411]210                         without writing the output. USE WITH CARE.
[1855]211
[1846]212        Example::
213
[116]214            scan.save('myscan.asap')
[1118]215            scan.save('myscan.sdfits', 'SDFITS')
[1846]216
[116]217        """
[411]218        from os import path
[1593]219        format = format or rcParams['scantable.save']
[256]220        suffix = '.'+format.lower()
[1118]221        if name is None or name == "":
[256]222            name = 'scantable'+suffix
[718]223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
[411]225        name = path.expandvars(name)
[256]226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
[718]228                msg = "File %s exists." % name
[1859]229                raise IOError(msg)
[451]230        format2 = format.upper()
231        if format2 == 'ASAP':
[116]232            self._save(name)
233        else:
[989]234            from asap._asap import stwriter as stw
[1118]235            writer = stw(format2)
236            writer.write(self, name)
[116]237        return
238
[102]239    def copy(self):
[1846]240        """Return a copy of this scantable.
241
242        *Note*:
243
[1348]244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]245
246        Example::
247
[102]248            copiedscan = scan.copy()
[1846]249
[102]250        """
[876]251        sd = scantable(Scantable._copy(self))
[113]252        return sd
253
[1093]254    def drop_scan(self, scanid=None):
[1846]255        """\
[1093]256        Return a new scantable where the specified scan number(s) has(have)
257        been dropped.
[1846]258
[1093]259        Parameters:
[1846]260
[1093]261            scanid:    a (list of) scan number(s)
[1846]262
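        Example (a usage sketch; the scan numbers are illustrative)::

            # return a new scantable without scans 0 and 3
            cleaned = scan.drop_scan([0, 3])
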
[1093]263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
[1859]268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
[1093]276
[1594]277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
[102]284    def get_scan(self, scanid=None):
[1855]285        """\
[102]286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
[1846]288
289        *Note*:
290
[1348]291            See scantable.drop_scan() for the inverse operation.
[1846]292
[102]293        Parameters:
[1846]294
[513]295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
[1846]298
299        Example::
300
[513]301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
[1118]306            newscan = scan.get_scan([0, 2, 7, 10])
[1846]307
[102]308        """
309        if scanid is None:
[1859]310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
[102]312        try:
[946]313            bsel = self.get_selection()
314            sel = selector()
[102]315            if type(scanid) is str:
[946]316                sel.set_name(scanid)
[1594]317                return self._select_copy(sel)
[102]318            elif type(scanid) is int:
[946]319                sel.set_scans([scanid])
[1594]320                return self._select_copy(sel)
[381]321            elif type(scanid) is list:
[946]322                sel.set_scans(scanid)
[1594]323                return self._select_copy(sel)
[381]324            else:
[718]325                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]326                raise TypeError(msg)
[102]327        except RuntimeError:
[1859]328            raise
[102]329
330    def __str__(self):
[1118]331        return Scantable._summary(self, True)
[102]332
[976]333    def summary(self, filename=None):
[1846]334        """\
[102]335        Print a summary of the contents of this scantable.
[1846]336
[102]337        Parameters:
[1846]338
[1931]339            filename:    the name of a file to write the output to
[102]340                         Default - no file output
[1846]341
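        Example (a usage sketch; the file name is illustrative)::

            scan.summary('obs_summary.txt')
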
[102]342        """
[976]343        info = Scantable._summary(self, True)
[102]344        if filename is not None:
[256]345            if filename == "":
346                filename = 'scantable_summary.txt'
[415]347            from os.path import expandvars, isdir
[411]348            filename = expandvars(filename)
[415]349            if not isdir(filename):
[413]350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
[718]354                msg = "Illegal file name '%s'." % (filename)
[1859]355                raise IOError(msg)
356        return page(info)
[710]357
[1512]358    def get_spectrum(self, rowno):
[1471]359        """Return the spectrum for the current row in the scantable as a list.
[1846]360
[1471]361        Parameters:
[1846]362
[1573]363             rowno:   the row number to retrieve the spectrum from
[1846]364
[1471]365        """
366        return self._getspectrum(rowno)
[946]367
[1471]368    def get_mask(self, rowno):
369        """Return the mask for the current row in the scantable as a list.
[1846]370
[1471]371        Parameters:
[1846]372
[1573]373             rowno:   the row number to retrieve the mask from
[1846]374
[1471]375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
[1938]379        """Set the spectrum for the current row in the scantable.
[1846]380
[1471]381        Parameters:
[1846]382
[1855]383             spec:   the new spectrum
[1846]384
[1855]385             rowno:  the row number to set the spectrum for
386
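        Example (a minimal sketch; fills row 0 with zeros of the required length)::

            sp = [0.0] * scan.nchan()
            scan.set_spectrum(sp, 0)
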
[1471]387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
[1600]391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a a given 'rowno'.
[1846]393
394        *Note*:
395
[1600]396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
[1846]402              i.e.::
[1600]403
[1846]404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
[1600]407        Parameters:
[1846]408
[1600]409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
[946]414    def get_selection(self):
[1846]415        """\
[1005]416        Get the selection object currently set on this scantable.
[1846]417
418        Example::
419
[1005]420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
[1846]423
[946]424        """
425        return selector(self._getselection())
426
[1576]427    def set_selection(self, selection=None, **kw):
[1846]428        """\
[1005]429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
[1846]431
[1005]432        Parameters:
[1697]433
[1846]434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
[1697]437
[1846]438        Examples::
[1697]439
[1005]440            sel = selector()         # create a selection object
[1118]441            sel.set_scans([0, 3])     # select SCANNO 0 and 3
[1005]442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
[1697]445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
[1846]449
[946]450        """
[1576]451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
[946]461        self._setselection(selection)
462
[1819]463    def get_row(self, row=0, insitu=None):
[1846]464        """\
[1819]465        Select a row in the scantable.
466        Return a scantable with single row.
[1846]467
[1819]468        Parameters:
[1846]469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
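        Example (a usage sketch; the row number is illustrative)::

            single = scan.get_row(10, insitu=False)
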
[1819]475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
[1862]499    @asaplog_post_dec
[1907]500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]501        """\
[135]502        Determine the specified statistic of the current beam/if/pol
[102]503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
[1846]505
[102]506        Parameters:
[1846]507
[1819]508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]510
[135]511            mask:    an optional mask specifying where the statistic
[102]512                     should be determined.
[1855]513
[1819]514            form:    format string to print statistic values
[1846]515
[1907]516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
[1846]518
[1907]519        Example::

[113]520            scan.set_unit('channel')
[1118]521            msk = scan.create_mask([100, 200], [500, 600])
[135]522            scan.stats(stat='mean', mask=msk)
[1846]523
[102]524        """
[1593]525        mask = mask or []
[876]526        if not self._check_ifs():
[1118]527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use setselection() "
529                             "to select individual IFs")
[1819]530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
[1907]537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]542
[1819]543        #def cb(i):
544        #    return statvals[i]
[256]545
[1819]546        #return self._row_callback(cb, stat)
[102]547
[1819]548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
[1907]553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
[1819]560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
[1907]575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]578            #outvec.append(callback(i))
[1907]579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]585            out +=  sep+"\n"
586
[1859]587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
[1819]605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
[1846]608        """\
[1819]609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
[1846]611
[1819]612        Parameters:
[1846]613
[1819]614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
[1855]616
[1819]617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. pos=0
[1846]619
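        Example (a usage sketch; row and channel are illustrative)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit'], qy['value'], qy['unit']
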
[1819]620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
[1118]628    def stddev(self, mask=None):
[1846]629        """\
[135]630        Determine the standard deviation of the current beam/if/pol
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
[1846]633
[135]634        Parameters:
[1846]635
[135]636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
[1846]639        Example::
640
[135]641            scan.set_unit('channel')
[1118]642            msk = scan.create_mask([100, 200], [500, 600])
[135]643            scan.stddev(mask=msk)
[1846]644
[135]645        """
[1118]646        return self.stats(stat='stddev', mask=mask);
[135]647
[1003]648
[1259]649    def get_column_names(self):
[1846]650        """\
[1003]651        Return a list of column names, which can be used for selection.
652        """
[1259]653        return list(Scantable.get_column_names(self))
[1003]654
[1730]655    def get_tsys(self, row=-1):
[1846]656        """\
[113]657        Return the System temperatures.
[1846]658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
[113]663        Returns:
[1846]664
[876]665            a list of Tsys values for the current selection
[1846]666
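        Example (a usage sketch)::

            tsys_all = scan.get_tsys()     # one value per row
            tsys_5 = scan.get_tsys(5)      # value for row 5 only
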
[113]667        """
[1730]668        if row > -1:
669            return self._get_column(self._gettsys, row)
[876]670        return self._row_callback(self._gettsys, "Tsys")
[256]671
[1730]672
673    def get_weather(self, row=-1):
[1846]674        """\
675        Return the weather informations.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or list of dicts of values for the current selection
684
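        Example (a usage sketch; the keys follow the dict layout above)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['windspeed']
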
685        """
686
[1730]687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
[876]703    def _row_callback(self, callback, label):
704        out = ""
[1118]705        outvec = []
[1590]706        sep = '-'*50
[876]707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
[1590]710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]711            out += 'Time[%s]:\n' % (tm)
[1590]712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
[1590]718            out +=  sep+'\n'
[1859]719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
[1861]724        asaplog.post()
[1175]725        return outvec
[256]726
[1070]727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
[1819]733            if  0 <= row < self.nrow():
[1070]734                return callback(row)
[256]735
[1070]736
[1348]737    def get_time(self, row=-1, asdatetime=False):
[1846]738        """\
[113]739        Get a list of time stamps for the observations.
[1938]740        Return a datetime object or a string (default) for each
741        integration time stamp in the scantable.
[1846]742
[113]743        Parameters:
[1846]744
[1348]745            row:          row no of integration. Default -1 return all rows
[1855]746
[1348]747            asdatetime:   return values as datetime objects rather than strings
[1846]748
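        Example (a usage sketch)::

            stamps = scan.get_time(asdatetime=True)   # list of datetime objects
            t0 = scan.get_time(0)                      # time string for row 0
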
[113]749        """
[1175]750        from time import strptime
751        from datetime import datetime
[1392]752        times = self._get_column(self._gettime, row)
[1348]753        if not asdatetime:
[1392]754            return times
[1175]755        format = "%Y/%m/%d/%H:%M:%S"
756        if isinstance(times, list):
757            return [datetime(*strptime(i, format)[:6]) for i in times]
758        else:
759            return datetime(*strptime(times, format)[:6])
[102]760
[1348]761
762    def get_inttime(self, row=-1):
[1846]763        """\
[1348]764        Get a list of integration times for the observations.
765        Return a time in seconds for each integration in the scantable.
[1846]766
[1348]767        Parameters:
[1846]768
[1348]769            row:    row no of integration. Default -1 return all rows.
[1846]770
[1348]771        """
[1573]772        return self._get_column(self._getinttime, row)
[1348]773
[1573]774
[714]775    def get_sourcename(self, row=-1):
[1846]776        """\
[794]777        Get a list of source names for the observations.
[714]778        Return a string for each integration in the scantable.
779        Parameters:
[1846]780
[1348]781            row:    row no of integration. Default -1 return all rows.
[1846]782
[714]783        """
[1070]784        return self._get_column(self._getsourcename, row)
[714]785
[794]786    def get_elevation(self, row=-1):
[1846]787        """\
[794]788        Get a list of elevations for the observations.
789        Return a float for each integration in the scantable.
[1846]790
[794]791        Parameters:
[1846]792
[1348]793            row:    row no of integration. Default -1 return all rows.
[1846]794
[794]795        """
[1070]796        return self._get_column(self._getelevation, row)
[794]797
798    def get_azimuth(self, row=-1):
[1846]799        """\
[794]800        Get a list of azimuths for the observations.
801        Return a float for each integration in the scantable.
[1846]802
[794]803        Parameters:
[1348]804            row:    row no of integration. Default -1 return all rows.
[1846]805
[794]806        """
[1070]807        return self._get_column(self._getazimuth, row)
[794]808
809    def get_parangle(self, row=-1):
[1846]810        """\
[794]811        Get a list of parallactic angles for the observations.
812        Return a float for each integration in the scantable.
[1846]813
[794]814        Parameters:
[1846]815
[1348]816            row:    row no of integration. Default -1 return all rows.
[1846]817
[794]818        """
[1070]819        return self._get_column(self._getparangle, row)
[794]820
[1070]821    def get_direction(self, row=-1):
822        """
823        Get a list of Positions on the sky (direction) for the observations.
[1594]824        Return a string for each integration in the scantable.
[1855]825
[1070]826        Parameters:
[1855]827
[1070]828            row:    row no of integration. Default -1 return all rows
[1855]829
[1070]830        """
831        return self._get_column(self._getdirection, row)
832
[1391]833    def get_directionval(self, row=-1):
[1846]834        """\
[1391]835        Get a list of Positions on the sky (direction) for the observations.
836        Return a float for each integration in the scantable.
[1846]837
[1391]838        Parameters:
[1846]839
[1391]840            row:    row no of integration. Default -1 return all rows
[1846]841
[1391]842        """
843        return self._get_column(self._getdirectionvec, row)
844
[1862]845    @asaplog_post_dec
[102]846    def set_unit(self, unit='channel'):
[1846]847        """\
[102]848        Set the unit for all following operations on this scantable
[1846]849
[102]850        Parameters:
[1846]851
852            unit:    optional unit, default is 'channel'. Use one of '*Hz',
853                     'km/s', 'channel' or equivalent ''
854
[102]855        """
[484]856        varlist = vars()
[1118]857        if unit in ['', 'pixel', 'channel']:
[113]858            unit = ''
859        inf = list(self._getcoordinfo())
860        inf[0] = unit
861        self._setcoordinfo(inf)
[1118]862        self._add_history("set_unit", varlist)
[113]863
[1862]864    @asaplog_post_dec
[484]865    def set_instrument(self, instr):
[1846]866        """\
[1348]867        Set the instrument for subsequent processing.
[1846]868
[358]869        Parameters:
[1846]870
[710]871            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]872                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]873
[358]874        """
875        self._setInstrument(instr)
[1118]876        self._add_history("set_instrument", vars())
[358]877
[1862]878    @asaplog_post_dec
[1190]879    def set_feedtype(self, feedtype):
[1846]880        """\
[1190]881        Overwrite the feed type, which might not be set correctly.
[1846]882
[1190]883        Parameters:
[1846]884
[1190]885            feedtype:     'linear' or 'circular'
[1846]886
[1190]887        """
888        self._setfeedtype(feedtype)
889        self._add_history("set_feedtype", vars())
890
[1862]891    @asaplog_post_dec
[276]892    def set_doppler(self, doppler='RADIO'):
[1846]893        """\
[276]894        Set the doppler for all following operations on this scantable.
[1846]895
[276]896        Parameters:
[1846]897
[276]898            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]899
[276]900        """
[484]901        varlist = vars()
[276]902        inf = list(self._getcoordinfo())
903        inf[2] = doppler
904        self._setcoordinfo(inf)
[1118]905        self._add_history("set_doppler", vars())
[710]906
[1862]907    @asaplog_post_dec
[226]908    def set_freqframe(self, frame=None):
[1846]909        """\
[113]910        Set the frame type of the Spectral Axis.
[1846]911
[113]912        Parameters:
[1846]913
[591]914            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]915                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]916                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]917
918        Example::
919
[113]920            scan.set_freqframe('BARY')
[1846]921
[113]922        """
[1593]923        frame = frame or rcParams['scantable.freqframe']
[484]924        varlist = vars()
[1819]925        # "REST" is not implemented in casacore
926        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
927        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
928        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]929                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]930
[989]931        if frame in valid:
[113]932            inf = list(self._getcoordinfo())
933            inf[1] = frame
934            self._setcoordinfo(inf)
[1118]935            self._add_history("set_freqframe", varlist)
[102]936        else:
[1118]937            msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
[1859]938            raise TypeError(msg)
[710]939
[1862]940    @asaplog_post_dec
[989]941    def set_dirframe(self, frame=""):
[1846]942        """\
[989]943        Set the frame type of the Direction on the sky.
[1846]944
[989]945        Parameters:
[1846]946
[989]947            frame:   an optional frame type, default ''. Valid frames are:
948                     'J2000', 'B1950', 'GALACTIC'
[1846]949
950        Example::
951
[989]952            scan.set_dirframe('GALACTIC')
[1846]953
[989]954        """
955        varlist = vars()
[1859]956        Scantable.set_dirframe(self, frame)
[1118]957        self._add_history("set_dirframe", varlist)
[989]958
[113]959    def get_unit(self):
[1846]960        """\
[113]961        Get the default unit set in this scantable
[1846]962
[113]963        Returns:
[1846]964
[113]965            A unit string
[1846]966
[113]967        """
968        inf = self._getcoordinfo()
969        unit = inf[0]
970        if unit == '': unit = 'channel'
971        return unit
[102]972
[1862]973    @asaplog_post_dec
[158]974    def get_abcissa(self, rowno=0):
[1846]975        """\
[158]976        Get the abcissa in the current coordinate setup for the currently
[113]977        selected Beam/IF/Pol
[1846]978
[113]979        Parameters:
[1846]980
[226]981            rowno:    an optional row number in the scantable. Default is the
982                      first row, i.e. rowno=0
[1846]983
[113]984        Returns:
[1846]985
[1348]986            The abcissa values and the format string (as a dictionary)
[1846]987
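        Example (a usage sketch)::

            abc, lbl = scan.get_abcissa(rowno=0)
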
[113]988        """
[256]989        abc = self._getabcissa(rowno)
[710]990        lbl = self._getabcissalabel(rowno)
[158]991        return abc, lbl
[113]992
[1862]993    @asaplog_post_dec
[1819]994    def flag(self, mask=None, unflag=False):
[1846]995        """\
[1001]996        Flag the selected data using an optional channel mask.
[1846]997
[1001]998        Parameters:
[1846]999
[1001]1000            mask:   an optional channel mask, created with create_mask. Default
1001                    (no mask) is all channels.
[1855]1002
[1819]1003            unflag:    if True, unflag the data
[1846]1004
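        Example (a usage sketch; the channel range is illustrative)::

            msk = scan.create_mask([0, 100])
            scan.flag(mask=msk)
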
[1001]1005        """
1006        varlist = vars()
[1593]1007        mask = mask or []
[1859]1008        self._flag(mask, unflag)
[1001]1009        self._add_history("flag", varlist)
1010
[1862]1011    @asaplog_post_dec
[1819]1012    def flag_row(self, rows=[], unflag=False):
[1846]1013        """\
[1819]1014        Flag the selected data in row-based manner.
[1846]1015
[1819]1016        Parameters:
[1846]1017
[1843]1018            rows:   list of row numbers to be flagged. Default is no row
1019                    (must be explicitly specified to execute row-based flagging).
[1855]1020
[1819]1021            unflag: if True, unflag the data.
[1846]1022
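        Example (a usage sketch; the row numbers are illustrative)::

            scan.flag_row([3, 5])
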
[1819]1023        """
1024        varlist = vars()
[1859]1025        self._flag_row(rows, unflag)
[1819]1026        self._add_history("flag_row", varlist)
1027
[1862]1028    @asaplog_post_dec
[1819]1029    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1030        """\
[1819]1031        Flag the selected data outside a specified range (channel-based)
[1846]1032
[1819]1033        Parameters:
[1846]1034
[1819]1035            uthres:      upper threshold.
[1855]1036
[1819]1037            dthres:      lower threshold
[1846]1038
[1819]1039            clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1040                         False for flagging data inside the range.
[1855]1041
[1846]1042            unflag:      if True, unflag the data.
1043
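        Example (a usage sketch; the thresholds are illustrative)::

            # flag all channels with values outside the range [-1.0, 3.0]
            scan.clip(uthres=3.0, dthres=-1.0, clipoutside=True)
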
[1819]1044        """
1045        varlist = vars()
[1859]1046        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1047        self._add_history("clip", varlist)
1048
[1862]1049    @asaplog_post_dec
[1584]1050    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1051        """\
[1192]1052        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1053        Flagged data in the scantable gets interpolated over the region.
[1192]1054        No taper is applied.
[1846]1055
[1192]1056        Parameters:
[1846]1057
[1579]1058            start:    the start frequency (really a period within the
1059                      bandwidth)  or period to remove
[1855]1060
[1579]1061            end:      the end frequency or period to remove
[1855]1062
[1584]1063            unit:     the frequency unit (default "MHz") or "" for
[1579]1064                      explicit lag channels
[1846]1065
1066        *Notes*:
1067
[1579]1068            It is recommended to flag edges of the band or strong
[1348]1069            signals beforehand.
[1846]1070
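        Example (a usage sketch; the frequency range is illustrative)::

            cleaned = scan.lag_flag(1.2, 1.3, unit="MHz", insitu=False)
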
[1192]1071        """
1072        if insitu is None: insitu = rcParams['insitu']
1073        self._math._setinsitu(insitu)
1074        varlist = vars()
[1579]1075        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1076        if not (unit == "" or base.has_key(unit)):
[1192]1077            raise ValueError("%s is not a valid unit." % unit)
[1859]1078        if unit == "":
1079            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1080        else:
1081            s = scantable(self._math._lag_flag(self, start*base[unit],
1082                                               end*base[unit], "frequency"))
[1192]1083        s._add_history("lag_flag", varlist)
1084        if insitu:
1085            self._assign(s)
1086        else:
1087            return s
[1001]1088
[1862]1089    @asaplog_post_dec
[113]1090    def create_mask(self, *args, **kwargs):
[1846]1091        """\
[1118]1092        Compute and return a mask based on [min, max] windows.
[189]1093        The specified windows are to be INCLUDED, when the mask is
[113]1094        applied.
[1846]1095
[102]1096        Parameters:
[1846]1097
[1118]1098            [min, max], [min2, max2], ...
[1024]1099                Pairs of start/end points (inclusive) specifying the regions
[102]1100                to be masked
[1855]1101
[189]1102            invert:     optional argument. If specified as True,
1103                        return an inverted mask, i.e. the regions
1104                        specified are EXCLUDED
[1855]1105
[513]1106            row:        create the mask using the specified row for
1107                        unit conversions, default is row=0
1108                        only necessary if frequency varies over rows.
[1846]1109
1110        Examples::
1111
[113]1112            scan.set_unit('channel')
[1846]1113            # a)
[1118]1114            msk = scan.create_mask([400, 500], [800, 900])
[189]1115            # masks everything outside 400 and 500
[113]1116            # and 800 and 900 in the unit 'channel'
1117
[1846]1118            # b)
[1118]1119            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1120            # masks the regions between 400 and 500
[113]1121            # and 800 and 900 in the unit 'channel'
[1846]1122
1123            # c)
1124            #mask only channel 400
[1554]1125            msk =  scan.create_mask([400])
[1846]1126
[102]1127        """
[1554]1128        row = kwargs.get("row", 0)
[513]1129        data = self._getabcissa(row)
[113]1130        u = self._getcoordinfo()[0]
[1859]1131        if u == "":
1132            u = "channel"
1133        msg = "The current mask window unit is %s" % u
1134        i = self._check_ifs()
1135        if not i:
1136            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1137        asaplog.push(msg)
[102]1138        n = self.nchan()
[1295]1139        msk = _n_bools(n, False)
[710]1140        # test if args is a 'list' or a 'normal *args - UGLY!!!
1141
[1118]1142        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1143             and args or args[0]
[710]1144        for window in ws:
[1554]1145            if len(window) == 1:
1146                window = [window[0], window[0]]
1147            if len(window) == 0 or  len(window) > 2:
1148                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1149            if window[0] > window[1]:
1150                tmp = window[0]
1151                window[0] = window[1]
1152                window[1] = tmp
[102]1153            for i in range(n):
[1024]1154                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1155                    msk[i] = True
[113]1156        if kwargs.has_key('invert'):
1157            if kwargs.get('invert'):
[1295]1158                msk = mask_not(msk)
[102]1159        return msk
[710]1160
[1931]1161    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1162        """\
[1819]1163        Compute and return a list of mask windows, [min, max].
[1846]1164
[1819]1165        Parameters:
[1846]1166
[1819]1167            mask:       channel mask, created with create_mask.
[1855]1168
[1819]1169            row:        calculate the masklist using the specified row
1170                        for unit conversions, default is row=0
1171                        only necessary if frequency varies over rows.
[1846]1172
[1819]1173        Returns:
[1846]1174
[1819]1175            [min, max], [min2, max2], ...
1176                Pairs of start/end points (inclusive) specifying
1177                the masked regions
[1846]1178
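        Example (a usage sketch; the channel range is illustrative)::

            msk = scan.create_mask([400, 500])
            print scan.get_masklist(msk)   # e.g. [[400.0, 500.0]] in the current unit
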
[1819]1179        """
1180        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1181            raise TypeError("The mask should be list or tuple.")
1182        if len(mask) < 2:
1183            raise TypeError("The mask should contain at least 2 elements")
1184        if self.nchan() != len(mask):
1185            msg = "Number of channels in scantable != number of mask elements"
1186            raise TypeError(msg)
1187        data = self._getabcissa(row)
1188        u = self._getcoordinfo()[0]
[1859]1189        if u == "":
1190            u = "channel"
1191        msg = "The current mask window unit is %s" % u
1192        i = self._check_ifs()
1193        if not i:
1194            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1195        if not silent:
1196            asaplog.push(msg)
[1819]1197        masklist=[]
1198        ist, ien = None, None
1199        ist, ien=self.get_mask_indices(mask)
1200        if ist is not None and ien is not None:
1201            for i in xrange(len(ist)):
1202                range=[data[ist[i]],data[ien[i]]]
1203                range.sort()
1204                masklist.append([range[0],range[1]])
1205        return masklist
1206
1207    def get_mask_indices(self, mask=None):
[1846]1208        """\
[1819]1209        Compute and return lists of mask start indices and mask end indices.
[1855]1210
1211        Parameters:
1212
[1819]1213            mask:       channel mask, created with create_mask.
[1846]1214
[1819]1215        Returns:
[1846]1216
[1819]1217            List of mask start indices and that of mask end indices,
1218            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1219
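        Example (a usage sketch)::

            ist, ien = scan.get_mask_indices([False, True, True, False])
            # ist == [1], ien == [2]
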
[1819]1220        """
1221        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1222            raise TypeError("The mask should be list or tuple.")
1223        if len(mask) < 2:
1224            raise TypeError("The mask should contain at least 2 elements")
1225        istart=[]
1226        iend=[]
1227        if mask[0]: istart.append(0)
1228        for i in range(len(mask)-1):
1229            if not mask[i] and mask[i+1]:
1230                istart.append(i+1)
1231            elif mask[i] and not mask[i+1]:
1232                iend.append(i)
1233        if mask[len(mask)-1]: iend.append(len(mask)-1)
1234        if len(istart) != len(iend):
1235            raise RuntimeError("Numbers of mask start != mask end.")
1236        for i in range(len(istart)):
1237            if istart[i] > iend[i]:
1238                raise RuntimeError("Mask start index > mask end index")
1239                break
1240        return istart,iend
1241
1242#    def get_restfreqs(self):
1243#        """
1244#        Get the restfrequency(s) stored in this scantable.
1245#        The return value(s) are always of unit 'Hz'
1246#        Parameters:
1247#            none
1248#        Returns:
1249#            a list of doubles
1250#        """
1251#        return list(self._getrestfreqs())
1252
1253    def get_restfreqs(self, ids=None):
[1846]1254        """\
[256]1255        Get the restfrequency(s) stored in this scantable.
1256        The return value(s) are always of unit 'Hz'
[1846]1257
[256]1258        Parameters:
[1846]1259
[1819]1260            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1261                 be retrieved
[1846]1262
[256]1263        Returns:
[1846]1264
[1819]1265            dictionary containing ids and a list of doubles for each id
[1846]1266
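        Example (a usage sketch)::

            rfdict = scan.get_restfreqs()    # {molecule_id: [freq_in_Hz, ...], ...}
            rf0 = scan.get_restfreqs(0)      # list of doubles for MOLECULE_ID 0
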
[256]1267        """
[1819]1268        if ids is None:
1269            rfreqs={}
1270            idlist = self.getmolnos()
1271            for i in idlist:
1272                rfreqs[i]=list(self._getrestfreqs(i))
1273            return rfreqs
1274        else:
1275            if type(ids)==list or type(ids)==tuple:
1276                rfreqs={}
1277                for i in ids:
1278                    rfreqs[i]=list(self._getrestfreqs(i))
1279                return rfreqs
1280            else:
1281                return list(self._getrestfreqs(ids))
1282            #return list(self._getrestfreqs(ids))
[102]1283
[931]1284    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1285        """\
[931]1286        Set or replace the rest frequency (or frequencies).
[1938]1287        If the 'freqs' argument holds a scalar,
[931]1288        then that rest frequency will be applied to all the selected
1289        data.  If the 'freqs' argument holds
1290        a vector, then it MUST be of equal or smaller length than
1291        the number of IFs (and the available restfrequencies will be
1292        replaced by this vector).  In this case, *all* data have
1293        the restfrequency set per IF according
1294        to the corresponding value you give in the 'freqs' vector.
[1118]1295        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1296        IF 1 gets restfreq 2e9.
[1846]1297
[1395]1298        You can also specify the frequencies via a linecatalog.
[1153]1299
[931]1300        Parameters:
[1846]1301
[931]1302            freqs:   list of rest frequency values or string identifiers
[1855]1303
[931]1304            unit:    unit for rest frequency (default 'Hz')
[402]1305
[1846]1306
1307        Example::
1308
[1819]1309            # set the given restfrequency for the all currently selected IFs
[931]1310            scan.set_restfreqs(freqs=1.4e9)
[1845]1311            # set restfrequencies for the n IFs  (n > 1) in the order of the
1312            # list, i.e
1313            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1314            # len(list_of_restfreqs) == nIF
1315            # for nIF == 1 the following will set multiple restfrequency for
1316            # that IF
[1819]1317            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1318            # set multiple restfrequencies per IF. as a list of lists where
1319            # the outer list has nIF elements, the inner s arbitrary
1320            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1321
[1846]1322       *Note*:
[1845]1323
[931]1324            To do more sophisticated rest frequency setting, e.g. on a
1325            source and IF basis, use scantable.set_selection() before using
[1846]1326            this function::
[931]1327
[1846]1328                # provided your scantable is called scan
1329                selection = selector()
1330                selection.set_name("ORION*")
1331                selection.set_ifs([1])
1332                scan.set_selection(selection)
1333                scan.set_restfreqs(freqs=86.6e9)
1334
[931]1335        """
1336        varlist = vars()
[1157]1337        from asap import linecatalog
1338        # simple  value
[1118]1339        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1340            self._setrestfreqs([freqs], [""], unit)
[1157]1341        # list of values
[1118]1342        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1343            # list values are scalars
[1118]1344            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1345                if len(freqs) == 1:
1346                    self._setrestfreqs(freqs, [""], unit)
1347                else:
1348                    # allow the 'old' mode of setting multiple IFs
1349                    sel = selector()
1350                    savesel = self._getselection()
1351                    iflist = self.getifnos()
1352                    if len(freqs)>len(iflist):
1353                        raise ValueError("number of elements in list of list "
1354                                         "exceeds the current IF selections")
1355                    iflist = self.getifnos()
1356                    for i, fval in enumerate(freqs):
1357                        sel.set_ifs(iflist[i])
1358                        self._setselection(sel)
1359                        self._setrestfreqs([fval], [""], unit)
1360                    self._setselection(savesel)
1361
1362            # list values are dict, {'value'=, 'name'=)
[1157]1363            elif isinstance(freqs[-1], dict):
[1845]1364                values = []
1365                names = []
1366                for d in freqs:
1367                    values.append(d["value"])
1368                    names.append(d["name"])
1369                self._setrestfreqs(values, names, unit)
[1819]1370            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1371                sel = selector()
1372                savesel = self._getselection()
[1322]1373                iflist = self.getifnos()
[1819]1374                if len(freqs)>len(iflist):
[1845]1375                    raise ValueError("number of elements in list of list exceeds"
1376                                     " the current IF selections")
1377                for i, fval in enumerate(freqs):
[1322]1378                    sel.set_ifs(iflist[i])
[1259]1379                    self._setselection(sel)
[1845]1380                    self._setrestfreqs(fval, [""], unit)
[1157]1381                self._setselection(savesel)
1382        # freqs are to be taken from a linecatalog
[1153]1383        elif isinstance(freqs, linecatalog):
1384            sel = selector()
1385            savesel = self._getselection()
1386            for i in xrange(freqs.nrow()):
[1322]1387                sel.set_ifs(iflist[i])
[1153]1388                self._setselection(sel)
[1845]1389                self._setrestfreqs([freqs.get_frequency(i)],
1390                                   [freqs.get_name(i)], "MHz")
[1153]1391                # ensure that we are not iterating past nIF
1392                if i == self.nif()-1: break
1393            self._setselection(savesel)
[931]1394        else:
1395            return
1396        self._add_history("set_restfreqs", varlist)
1397
[1360]1398    def shift_refpix(self, delta):
[1846]1399        """\
[1589]1400        Shift the reference pixel of the Spectral Coordinate by an
1401        integer amount.
[1846]1402
[1589]1403        Parameters:
[1846]1404
[1589]1405            delta:   the amount to shift by
[1846]1406
1407        *Note*:
1408
[1589]1409            Be careful using this with broadband data.
[1846]1410
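        Example (a usage sketch)::

            scan.shift_refpix(2)    # shift the reference pixel by two channels
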
[1360]1411        """
[1731]1412        Scantable.shift_refpix(self, delta)
[931]1413
[1862]1414    @asaplog_post_dec
[1259]1415    def history(self, filename=None):
[1846]1416        """\
[1259]1417        Print the history. Optionally to a file.
[1846]1418
[1348]1419        Parameters:
[1846]1420
[1928]1421            filename:    The name of the file to save the history to.
[1846]1422
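        Example (a minimal sketch; the file name is illustrative)::

            # print the history to the screen
            scan.history()
            # or write it to a file
            scan.history('myscan_history.txt')
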
[1259]1423        """
[484]1424        hist = list(self._gethistory())
[794]1425        out = "-"*80
[484]1426        for h in hist:
[489]1427            if h.startswith("---"):
[1857]1428                out = "\n".join([out, h])
[489]1429            else:
1430                items = h.split("##")
1431                date = items[0]
1432                func = items[1]
1433                items = items[2:]
[794]1434                out += "\n"+date+"\n"
1435                out += "Function: %s\n  Parameters:" % (func)
[489]1436                for i in items:
[1938]1437                    if i == '':
1438                        continue
[489]1439                    s = i.split("=")
[1118]1440                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1441                out = "\n".join([out, "-"*80])
[1259]1442        if filename is not None:
1443            if filename == "":
1444                filename = 'scantable_history.txt'
1445            import os
1446            filename = os.path.expandvars(os.path.expanduser(filename))
1447            if not os.path.isdir(filename):
1448                data = open(filename, 'w')
1449                data.write(out)
1450                data.close()
1451            else:
1452                msg = "Illegal file name '%s'." % (filename)
[1859]1453                raise IOError(msg)
1454        return page(out)
[513]1455    #
1456    # Maths business
1457    #
[1862]1458    @asaplog_post_dec
[931]1459    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1460        """\
[1070]1461        Return the (time) weighted average of a scan.
[1846]1462
1463        *Note*:
1464
[1070]1465            in channels only - align if necessary
[1846]1466
[513]1467        Parameters:
[1846]1468
[513]1469            mask:     an optional mask (only used for 'var' and 'tsys'
1470                      weighting)
[1855]1471
[558]1472            scanav:   True averages each scan separately
1473                      False (default) averages all scans together.
[1855]1474
[1099]1475            weight:   Weighting scheme.
1476                      'none'     (mean, no weighting)
1477                      'var'      (1/var(spec) weighted)
1478                      'tsys'     (1/Tsys**2 weighted)
1479                      'tint'     (integration time weighted)
1480                      'tintsys'  (Tint/Tsys**2)
1481                      'median'   (median averaging)
[535]1482                      The default is 'tint'
[1855]1483
[931]1484            align:    align the spectra in velocity before averaging. It takes
1485                      the time of the first spectrum as reference time.
[1846]1486
1487        Example::
1488
[513]1489            # time average the scantable without using a mask
[710]1490            newscan = scan.average_time()
[1846]1491
[513]1492        """
1493        varlist = vars()
[1593]1494        weight = weight or 'TINT'
1495        mask = mask or ()
1496        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1497        scan = (self, )
[1859]1498
1499        if align:
1500            scan = (self.freq_align(insitu=False), )
1501        s = None
1502        if weight.upper() == 'MEDIAN':
1503            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1504                                                     scanav))
1505        else:
1506            s = scantable(self._math._average(scan, mask, weight.upper(),
1507                          scanav))
[1099]1508        s._add_history("average_time", varlist)
[513]1509        return s
[710]1510
[1862]1511    @asaplog_post_dec
[876]1512    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1513        """\
[513]1514        Return a scan where all spectra are converted to either
1515        Jansky or Kelvin depending upon the flux units of the scan table.
1516        By default the function tries to look the values up internally.
1517        If it can't find them (or if you want to override them), you must
1518        specify EITHER jyperk OR eta (and d, which it will also try to look
1519        up if you don't set it). jyperk takes precedence if you set both.
[1846]1520
[513]1521        Parameters:
[1846]1522
[513]1523            jyperk:      the Jy / K conversion factor
[1855]1524
[513]1525            eta:         the aperture efficiency
[1855]1526
[1928]1527            d:           the geometric diameter (metres)
[1855]1528
[513]1529            insitu:      if False a new scantable is returned.
1530                         Otherwise, the scaling is done in-situ
1531                         The default is taken from .asaprc (False)
[1846]1532
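        Example (a minimal sketch; the Jy/K factor is illustrative)::

            # convert a copy of the data using an explicit Jy/K factor
            converted = scan.convert_flux(jyperk=12.7, insitu=False)
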
[513]1533        """
1534        if insitu is None: insitu = rcParams['insitu']
[876]1535        self._math._setinsitu(insitu)
[513]1536        varlist = vars()
[1593]1537        jyperk = jyperk or -1.0
1538        d = d or -1.0
1539        eta = eta or -1.0
[876]1540        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1541        s._add_history("convert_flux", varlist)
1542        if insitu: self._assign(s)
1543        else: return s
[513]1544
[1862]1545    @asaplog_post_dec
[876]1546    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1547        """\
[513]1548        Return a scan after applying a gain-elevation correction.
1549        The correction can be made via either a polynomial or a
1550        table-based interpolation (and extrapolation if necessary).
1551        You specify polynomial coefficients, an ascii table or neither.
1552        If you specify neither, then a polynomial correction will be made
1553        with built-in coefficients known for certain telescopes (an error
1554        will occur if the instrument is not known).
1555        The data and Tsys are *divided* by the scaling factors.
[1846]1556
[513]1557        Parameters:
[1846]1558
[513]1559            poly:        Polynomial coefficients (default None) to compute a
1560                         gain-elevation correction as a function of
1561                         elevation (in degrees).
[1855]1562
[513]1563            filename:    The name of an ascii file holding correction factors.
1564                         The first row of the ascii file must give the column
1565                         names and these MUST include columns
1566                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1567                         by this) somewhere.
1568                         The second row must give the data type of the
1569                         column. Use 'R' for Real and 'I' for Integer.
1570                         An example file would be
1571                         (actual factors are arbitrary) :
1572
1573                         TIME ELEVATION FACTOR
1574                         R R R
1575                         0.1 0 0.8
1576                         0.2 20 0.85
1577                         0.3 40 0.9
1578                         0.4 60 0.85
1579                         0.5 80 0.8
1580                         0.6 90 0.75
[1855]1581
[513]1582            method:      Interpolation method when correcting from a table.
1583                         Values are  "nearest", "linear" (default), "cubic"
1584                         and "spline"
[1855]1585
[513]1586            insitu:      if False a new scantable is returned.
1587                         Otherwise, the scaling is done in-situ
1588                         The default is taken from .asaprc (False)
[1846]1589
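        Example (a minimal sketch; 'gainel.txt' is an illustrative file name)::

            # correct using factors interpolated from an ascii table
            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
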
[513]1590        """
1591
1592        if insitu is None: insitu = rcParams['insitu']
[876]1593        self._math._setinsitu(insitu)
[513]1594        varlist = vars()
[1593]1595        poly = poly or ()
[513]1596        from os.path import expandvars
1597        filename = expandvars(filename)
[876]1598        s = scantable(self._math._gainel(self, poly, filename, method))
1599        s._add_history("gain_el", varlist)
[1593]1600        if insitu:
1601            self._assign(s)
1602        else:
1603            return s
[710]1604
[1862]1605    @asaplog_post_dec
[931]1606    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1607        """\
[513]1608        Return a scan where all rows have been aligned in frequency/velocity.
1609        The alignment frequency frame (e.g. LSRK) is that set by function
1610        set_freqframe.
[1846]1611
[513]1612        Parameters:
[1855]1613
[513]1614            reftime:     reference time to align at. By default, the time of
1615                         the first row of data is used.
[1855]1616
[513]1617            method:      Interpolation method for regridding the spectra.
1618                         Choose from "nearest", "linear", "cubic" (default)
1619                         and "spline"
[1855]1620
[513]1621            insitu:      if False a new scantable is returned.
1622                         Otherwise, the scaling is done in-situ
1623                         The default is taken from .asaprc (False)
[1846]1624
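        Example (a minimal sketch)::

            # align all rows at the time of the first row and return a
            # new scantable
            aligned = scan.freq_align(insitu=False)
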
[513]1625        """
[931]1626        if insitu is None: insitu = rcParams["insitu"]
[876]1627        self._math._setinsitu(insitu)
[513]1628        varlist = vars()
[1593]1629        reftime = reftime or ""
[931]1630        s = scantable(self._math._freq_align(self, reftime, method))
[876]1631        s._add_history("freq_align", varlist)
1632        if insitu: self._assign(s)
1633        else: return s
[513]1634
[1862]1635    @asaplog_post_dec
[1725]1636    def opacity(self, tau=None, insitu=None):
[1846]1637        """\
[513]1638        Apply an opacity correction. The data
1639        and Tsys are multiplied by the correction factor.
[1846]1640
[513]1641        Parameters:
[1855]1642
[1689]1643            tau:         (list of) opacity from which the correction factor is
[513]1644                         exp(tau*ZD)
[1689]1645                         where ZD is the zenith-distance.
1646                         If a list is provided, it has to be of length nIF,
1647                         nIF*nPol or 1 and in order of IF/POL, e.g.
1648                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1649                         if tau is `None` the opacities are determined from a
1650                         model.
[1855]1651
[513]1652            insitu:      if False a new scantable is returned.
1653                         Otherwise, the scaling is done in-situ
1654                         The default is taken from .asaprc (False)
[1846]1655
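        Example (a minimal sketch; the opacity value is illustrative)::

            # apply a single zenith opacity to all IFs/polarisations
            corrected = scan.opacity(tau=0.09, insitu=False)
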
[513]1656        """
1657        if insitu is None: insitu = rcParams['insitu']
[876]1658        self._math._setinsitu(insitu)
[513]1659        varlist = vars()
[1689]1660        if not hasattr(tau, "__len__"):
1661            tau = [tau]
[876]1662        s = scantable(self._math._opacity(self, tau))
1663        s._add_history("opacity", varlist)
1664        if insitu: self._assign(s)
1665        else: return s
[513]1666
[1862]1667    @asaplog_post_dec
[513]1668    def bin(self, width=5, insitu=None):
[1846]1669        """\
[513]1670        Return a scan where all spectra have been binned up.
[1846]1671
[1348]1672        Parameters:
[1846]1673
[513]1674            width:       The bin width (default=5) in pixels
[1855]1675
[513]1676            insitu:      if False a new scantable is returned.
1677                         Otherwise, the scaling is done in-situ
1678                         The default is taken from .asaprc (False)
[1846]1679
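        Example (a minimal sketch)::

            # bin up by 10 channels and return a new scantable
            binned = scan.bin(width=10, insitu=False)
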
[513]1680        """
1681        if insitu is None: insitu = rcParams['insitu']
[876]1682        self._math._setinsitu(insitu)
[513]1683        varlist = vars()
[876]1684        s = scantable(self._math._bin(self, width))
[1118]1685        s._add_history("bin", varlist)
[1589]1686        if insitu:
1687            self._assign(s)
1688        else:
1689            return s
[513]1690
[1862]1691    @asaplog_post_dec
[513]1692    def resample(self, width=5, method='cubic', insitu=None):
[1846]1693        """\
[1348]1694        Return a scan where all spectra have been resampled.
[1573]1695
[1348]1696        Parameters:
[1846]1697
[513]1698            width:       The bin width (default=5) in pixels
[1855]1699
[513]1700            method:      Interpolation method for the resampling.
1701                         Values are  "nearest", "linear", "cubic" (default)
1702                         and "spline"
[1855]1703
[513]1704            insitu:      if False a new scantable is returned.
1705                         Otherwise, the scaling is done in-situ
1706                         The default is taken from .asaprc (False)
[1846]1707
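        Example (a minimal sketch; the width is illustrative)::

            # resample with a width of 4 pixels using cubic interpolation
            resampled = scan.resample(width=4, method='cubic', insitu=False)
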
[513]1708        """
1709        if insitu is None: insitu = rcParams['insitu']
[876]1710        self._math._setinsitu(insitu)
[513]1711        varlist = vars()
[876]1712        s = scantable(self._math._resample(self, method, width))
[1118]1713        s._add_history("resample", varlist)
[876]1714        if insitu: self._assign(s)
1715        else: return s
[513]1716
[1862]1717    @asaplog_post_dec
[946]1718    def average_pol(self, mask=None, weight='none'):
[1846]1719        """\
[946]1720        Average the Polarisations together.
[1846]1721
[946]1722        Parameters:
[1846]1723
[946]1724            mask:        An optional mask defining the region, where the
1725                         averaging will be applied. The output will have all
1726                         specified points masked.
[1855]1727
[946]1728            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1729                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1730
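        Example (a minimal sketch)::

            # average the polarisations using Tsys weighting
            polaveraged = scan.average_pol(weight='tsys')
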
[946]1731        """
1732        varlist = vars()
[1593]1733        mask = mask or ()
[1010]1734        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1735        s._add_history("average_pol", varlist)
[992]1736        return s
[513]1737
[1862]1738    @asaplog_post_dec
[1145]1739    def average_beam(self, mask=None, weight='none'):
[1846]1740        """\
[1145]1741        Average the Beams together.
[1846]1742
[1145]1743        Parameters:
1744            mask:        An optional mask defining the region, where the
1745                         averaging will be applied. The output will have all
1746                         specified points masked.
[1855]1747
[1145]1748            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1749                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1750
[1145]1751        """
1752        varlist = vars()
[1593]1753        mask = mask or ()
[1145]1754        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1755        s._add_history("average_beam", varlist)
1756        return s
1757
[1586]1758    def parallactify(self, pflag):
[1846]1759        """\
[1843]1760        Set a flag to indicate whether this data should be treated as having
[1617]1761        been 'parallactified' (total phase == 0.0)
[1846]1762
[1617]1763        Parameters:
[1855]1764
[1843]1765            pflag:  Bool indicating whether to turn this on (True) or
[1617]1766                    off (False)
[1846]1767
[1617]1768        """
[1586]1769        varlist = vars()
1770        self._parallactify(pflag)
1771        self._add_history("parallactify", varlist)
1772
[1862]1773    @asaplog_post_dec
[992]1774    def convert_pol(self, poltype=None):
[1846]1775        """\
[992]1776        Convert the data to a different polarisation type.
[1565]1777        Note that you will need cross-polarisation terms for most conversions.
[1846]1778
[992]1779        Parameters:
[1855]1780
[992]1781            poltype:    The new polarisation type. Valid types are:
[1565]1782                        "linear", "circular", "stokes" and "linpol"
[1846]1783
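        Example (a minimal sketch)::

            # convert (e.g. linears with cross terms) to Stokes parameters
            stokes = scan.convert_pol('stokes')
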
[992]1784        """
1785        varlist = vars()
[1859]1786        s = scantable(self._math._convertpol(self, poltype))
[1118]1787        s._add_history("convert_pol", varlist)
[992]1788        return s
1789
[1862]1790    @asaplog_post_dec
[1819]1791    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]1792        """\
[513]1793        Smooth the spectrum by the specified kernel (conserving flux).
[1846]1794
[513]1795        Parameters:
[1846]1796
[513]1797            kernel:     The type of smoothing kernel. Select from
[1574]1798                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1799                        or 'poly'
[1855]1800
[513]1801            width:      The width of the kernel in pixels. For hanning this is
1802                        ignored, otherwise it defaults to 5 pixels.
1803                        For 'gaussian' it is the Full Width Half
1804                        Maximum. For 'boxcar' it is the full width.
[1574]1805                        For 'rmedian' and 'poly' it is the half width.
[1855]1806
[1574]1807            order:      Optional parameter for 'poly' kernel (default is 2), to
1808                        specify the order of the polynomial. Ignored by all other
1809                        kernels.
[1855]1810
[1819]1811            plot:       plot the original and the smoothed spectra.
1812                        Each individual fit has to be approved by
1813                        typing 'y' or 'n'
[1855]1814
[513]1815            insitu:     if False a new scantable is returned.
1816                        Otherwise, the scaling is done in-situ
1817                        The default is taken from .asaprc (False)
[1846]1818
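        Example (a minimal sketch; the width is illustrative)::

            # smooth with a gaussian of FWHM 3 channels and return a
            # new scantable
            smoothed = scan.smooth(kernel='gaussian', width=3, insitu=False)
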
[513]1819        """
1820        if insitu is None: insitu = rcParams['insitu']
[876]1821        self._math._setinsitu(insitu)
[513]1822        varlist = vars()
[1819]1823
1824        if plot: orgscan = self.copy()
1825
[1574]1826        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1827        s._add_history("smooth", varlist)
[1819]1828
1829        if plot:
1830            if rcParams['plotter.gui']:
1831                from asap.asaplotgui import asaplotgui as asaplot
1832            else:
1833                from asap.asaplot import asaplot
1834            self._p=asaplot()
1835            self._p.set_panels()
1836            ylab=s._get_ordinate_label()
1837            #self._p.palette(0,["#777777","red"])
1838            for r in xrange(s.nrow()):
1839                xsm=s._getabcissa(r)
1840                ysm=s._getspectrum(r)
1841                xorg=orgscan._getabcissa(r)
1842                yorg=orgscan._getspectrum(r)
1843                self._p.clear()
1844                self._p.hold()
1845                self._p.set_axes('ylabel',ylab)
1846                self._p.set_axes('xlabel',s._getabcissalabel(r))
1847                self._p.set_axes('title',s._getsourcename(r))
1848                self._p.set_line(label='Original',color="#777777")
1849                self._p.plot(xorg,yorg)
1850                self._p.set_line(label='Smoothed',color="red")
1851                self._p.plot(xsm,ysm)
1852                ### Ugly part for legend
1853                for i in [0,1]:
1854                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1855                self._p.release()
1856                ### Ugly part for legend
1857                self._p.subplots[0]['lines']=[]
1858                res = raw_input("Accept smoothing ([y]/n): ")
1859                if res.upper() == 'N':
1860                    s._setspectrum(yorg, r)
1861            self._p.unmap()
1862            self._p = None
1863            del orgscan
1864
[876]1865        if insitu: self._assign(s)
1866        else: return s
[513]1867
[1862]1868    @asaplog_post_dec
[1907]1869    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
[1846]1870        """\
[513]1871        Return a scan which has been baselined (all rows) by a polynomial.
[1907]1872       
[513]1873        Parameters:
[1846]1874
[794]1875            mask:       an optional mask
[1855]1876
[794]1877            order:      the order of the polynomial (default is 0)
[1855]1878
[1061]1879            plot:       plot the fit and the residual. Each
1880                        individual fit has to be approved by typing 'y'
1881                        or 'n'
[1855]1882
[1391]1883            uselin:     use linear polynomial fit
[1855]1884
[794]1885            insitu:     if False a new scantable is returned.
1886                        Otherwise, the scaling is done in-situ
1887                        The default is taken from .asaprc (False)
[1846]1888
[1907]1889            rows:       row numbers of spectra to be processed.
1890                        (default is None: for all rows)
1891       
1892        Example:
[513]1893            # return a scan baselined by a third order polynomial,
1894            # not using a mask
1895            bscan = scan.poly_baseline(order=3)
[1846]1896
[579]1897        """
[513]1898        if insitu is None: insitu = rcParams['insitu']
[1819]1899        if not insitu:
1900            workscan = self.copy()
1901        else:
1902            workscan = self
[513]1903        varlist = vars()
1904        if mask is None:
[1907]1905            mask = [True for i in xrange(self.nchan())]
[1819]1906
[1217]1907        try:
1908            f = fitter()
[1391]1909            if uselin:
1910                f.set_function(lpoly=order)
1911            else:
1912                f.set_function(poly=order)
[1819]1913
[1907]1914            if rows is None:
1915                rows = xrange(workscan.nrow())
1916            elif isinstance(rows, int):
1917                rows = [ rows ]
1918           
[1819]1919            if len(rows) > 0:
1920                self.blpars = []
[1907]1921                self.masklists = []
1922                self.actualmask = []
1923           
[1819]1924            for r in rows:
1925                f.x = workscan._getabcissa(r)
1926                f.y = workscan._getspectrum(r)
[1907]1927                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]1928                f.data = None
1929                f.fit()
1930                if plot:
1931                    f.plot(residual=True)
1932                    x = raw_input("Accept fit ( [y]/n ): ")
1933                    if x.upper() == 'N':
1934                        self.blpars.append(None)
[1907]1935                        self.masklists.append(None)
1936                        self.actualmask.append(None)
[1819]1937                        continue
1938                workscan._setspectrum(f.fitter.getresidual(), r)
1939                self.blpars.append(f.get_parameters())
[1931]1940                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
[1907]1941                self.actualmask.append(f.mask)
[1819]1942
1943            if plot:
1944                f._p.unmap()
1945                f._p = None
1946            workscan._add_history("poly_baseline", varlist)
[1856]1947            if insitu:
1948                self._assign(workscan)
1949            else:
1950                return workscan
[1217]1951        except RuntimeError:
1952            msg = "The fit failed, possibly because it didn't converge."
[1859]1953            raise RuntimeError(msg)
[513]1954
[1931]1955    @asaplog_post_dec
[1907]1956    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1957        """\
1958        Return a scan which has been baselined (all rows) by a polynomial.
1959        Parameters:
1960            mask:       an optional mask
1961            order:      the order of the polynomial (default is 0)
1962            plot:       plot the fit and the residual. Each
1963                        individual fit has to be approved by typing 'y'
1964                        or 'n'. Ignored if batch = True.
1965            batch:      if True a faster algorithm is used and logs
1966                        including the fit results are not output
1967                        (default is False)
1968            insitu:     if False a new scantable is returned.
1969                        Otherwise, the scaling is done in-situ
1970                        The default is taken from .asaprc (False)
[1931]1971            rows:       row numbers of spectra to be baselined.
[1907]1972                        (default is None: for all rows)
1973        Example:
1974            # return a scan baselined by a third order polynomial,
1975            # not using a mask
1976            bscan = scan.poly_baseline(order=3)
1977        """
[1931]1978       
1979        varlist = vars()
1980       
[1907]1981        if insitu is None: insitu = rcParams["insitu"]
1982        if insitu:
1983            workscan = self
1984        else:
1985            workscan = self.copy()
1986
1987        nchan = workscan.nchan()
1988       
1989        if mask is None:
1990            mask = [True for i in xrange(nchan)]
1991
1992        try:
1993            if rows == None:
1994                rows = xrange(workscan.nrow())
1995            elif isinstance(rows, int):
1996                rows = [ rows ]
1997           
1998            if len(rows) > 0:
[1931]1999                workscan.blpars = []
2000                workscan.masklists = []
2001                workscan.actualmask = []
[1907]2002
2003            if batch:
[1931]2004                workscan._poly_baseline_batch(mask, order)
[1907]2005            elif plot:
2006                f = fitter()
2007                f.set_function(lpoly=order)
2008                for r in rows:
2009                    f.x = workscan._getabcissa(r)
2010                    f.y = workscan._getspectrum(r)
2011                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2012                    f.data = None
2013                    f.fit()
2014                   
2015                    f.plot(residual=True)
2016                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2017                    if accept_fit.upper() == "N":
2018                        workscan.blpars.append(None)
2019                        workscan.masklists.append(None)
2020                        workscan.actualmask.append(None)
2021                        continue
2022                    workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2023                    workscan.blpars.append(f.get_parameters())
2024                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2025                    workscan.actualmask.append(f.mask)
[1907]2026                   
2027                f._p.unmap()
2028                f._p = None
2029            else:
2030                for r in rows:
[1931]2031                    fitparams = workscan._poly_baseline(mask, order, r)
2032                    params = fitparams.getparameters()
2033                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2034                    errors = fitparams.geterrors()
2035                    fmask = mask_and(mask, workscan._getmask(r))
2036
2037                    workscan.blpars.append({"params":params,
2038                                            "fixed": fitparams.getfixedparameters(),
2039                                            "formatted":fmtd, "errors":errors})
2040                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2041                    workscan.actualmask.append(fmask)
[1907]2042                   
[1931]2043                    asaplog.push(fmtd)
[1907]2044           
2045            workscan._add_history("poly_baseline", varlist)
2046           
2047            if insitu:
2048                self._assign(workscan)
2049            else:
2050                return workscan
2051           
[1919]2052        except RuntimeError, e:
[1907]2053            msg = "The fit failed, possibly because it didn't converge."
2054            if rcParams["verbose"]:
[1919]2055                asaplog.push(str(e))
[1907]2056                asaplog.push(str(msg))
2057                return
2058            else:
[1919]2059                raise RuntimeError(str(e)+'\n'+msg)
[1907]2060
2061
2062    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2063                           threshold=3, chan_avg_limit=1, plot=False,
[1907]2064                           insitu=None, rows=None):
[1846]2065        """\
[1931]2066        Return a scan which has been baselined (all rows) by a polynomial.
[880]2067        Spectral lines are detected first using linefinder and masked out
2068        to avoid them affecting the baseline solution.
2069
2070        Parameters:
[1846]2071
[880]2072            mask:       an optional mask retrieved from scantable
[1846]2073
2074            edge:       an optional number of channels to drop at the edge of
2075                        the spectrum. If only one value is
[880]2076                        specified, the same number will be dropped from
2077                        both sides of the spectrum. Default is to keep
[907]2078                        all channels. Nested tuples represent individual
[976]2079                        edge selections for different IFs (the number of spectral
2080                        channels can differ between IFs)
[1846]2081
[880]2082            order:      the order of the polynomial (default is 0)
[1846]2083
[880]2084            threshold:  the threshold used by line finder. It is better to
2085                        keep it large as only strong lines affect the
2086                        baseline solution.
[1846]2087
[1280]2088            chan_avg_limit:
2089                        a maximum number of consecutive spectral channels to
2090                        average during the search of weak and broad lines.
2091                        The default is no averaging (and no search for weak
2092                        lines). If such lines can affect the fitted baseline
2093                        (e.g. a high order polynomial is fitted), increase this
2094                        parameter (usually values up to 8 are reasonable). Most
2095                        users of this method should find the default value
2096                        sufficient.
[1846]2097
[1061]2098            plot:       plot the fit and the residual. Each
2099                        individual fit has to be approved by typing 'y'
2100                        or 'n'
[1846]2101
[880]2102            insitu:     if False a new scantable is returned.
2103                        Otherwise, the scaling is done in-situ
2104                        The default is taken from .asaprc (False)
[1907]2105            rows:       row numbers of spectra to be processed.
2106                        (default is None: for all rows)
[880]2107
[1846]2108
2109        Example::
2110
2111            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2112
[880]2113        """
2114        if insitu is None: insitu = rcParams['insitu']
2115        varlist = vars()
2116        from asap.asaplinefind import linefinder
2117        from asap import _is_sequence_or_number as _is_valid
2118
[976]2119        # check whether edge is set up for each IF individually
[1118]2120        individualedge = False;
2121        if len(edge) > 1:
2122            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2123                individualedge = True;
[907]2124
[1118]2125        if not _is_valid(edge, int) and not individualedge:
[909]2126            raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2127            pair of integers specified as a tuple. Nested tuples are allowed \
2128            to make individual selection for different IFs."
[919]2129
[1118]2130        curedge = (0, 0)
2131        if individualedge:
2132            for edgepar in edge:
2133                if not _is_valid(edgepar, int):
2134                    raise ValueError, "Each element of the 'edge' tuple has \
2135                                       to be a pair of integers or an integer."
[907]2136        else:
[1118]2137            curedge = edge;
[880]2138
[1907]2139        if not insitu:
2140            workscan = self.copy()
2141        else:
2142            workscan = self
2143
[880]2144        # setup fitter
2145        f = fitter()
[1907]2146        f.set_function(lpoly=order)
[880]2147
2148        # setup line finder
[1118]2149        fl = linefinder()
[1268]2150        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2151
[907]2152        fl.set_scan(workscan)
2153
[1907]2154        if mask is None:
2155            mask = _n_bools(workscan.nchan(), True)
2156       
2157        if rows is None:
2158            rows = xrange(workscan.nrow())
2159        elif isinstance(rows, int):
2160            rows = [ rows ]
2161       
[1819]2162        # Save parameters of baseline fits & masklists as a class attribute.
2163        # NOTICE: It does not reflect changes in scantable!
2164        if len(rows) > 0:
2165            self.blpars=[]
2166            self.masklists=[]
[1907]2167            self.actualmask=[]
[880]2168        asaplog.push("Processing:")
2169        for r in rows:
[1118]2170            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2171                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2172                 workscan.getpol(r), workscan.getcycle(r))
[880]2173            asaplog.push(msg, False)
[907]2174
[976]2175            # figure out edge parameter
[1118]2176            if individualedge:
2177                if len(edge) <= workscan.getif(r):
2178                    raise RuntimeError, "Number of edge elements appears to " \
2179                                        "be less than the number of IFs"
2180                curedge = edge[workscan.getif(r)]
[919]2181
[1907]2182            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]2183
[976]2184            # setup line finder
[1819]2185            fl.find_lines(r, actualmask, curedge)
[1907]2186           
[1819]2187            f.x = workscan._getabcissa(r)
2188            f.y = workscan._getspectrum(r)
[1907]2189            f.mask = fl.get_mask()
[1819]2190            f.data = None
[880]2191            f.fit()
[1819]2192
2193            # Show mask list
[1931]2194            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2195            msg = "mask range: "+str(masklist)
2196            asaplog.push(msg, False)
2197
[1061]2198            if plot:
2199                f.plot(residual=True)
2200                x = raw_input("Accept fit ( [y]/n ): ")
2201                if x.upper() == 'N':
[1819]2202                    self.blpars.append(None)
2203                    self.masklists.append(None)
[1907]2204                    self.actualmask.append(None)
[1061]2205                    continue
[1819]2206
[880]2207            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2208            self.blpars.append(f.get_parameters())
2209            self.masklists.append(masklist)
[1907]2210            self.actualmask.append(f.mask)
[1061]2211        if plot:
2212            f._p.unmap()
2213            f._p = None
2214        workscan._add_history("auto_poly_baseline", varlist)
[880]2215        if insitu:
2216            self._assign(workscan)
2217        else:
2218            return workscan
2219
[1862]2220    @asaplog_post_dec
[914]2221    def rotate_linpolphase(self, angle):
[1846]2222        """\
[914]2223        Rotate the phase of the complex polarization O=Q+iU correlation.
2224        This is always done in situ in the raw data.  So if you call this
2225        function more than once then each call rotates the phase further.
[1846]2226
[914]2227        Parameters:
[1846]2228
[914]2229            angle:   The angle (degrees) to rotate (add) by.
[1846]2230
2231        Example::
2232
[914]2233            scan.rotate_linpolphase(2.3)
[1846]2234
[914]2235        """
2236        varlist = vars()
[936]2237        self._math._rotate_linpolphase(self, angle)
[914]2238        self._add_history("rotate_linpolphase", varlist)
2239        return
[710]2240
[1862]2241    @asaplog_post_dec
[914]2242    def rotate_xyphase(self, angle):
[1846]2243        """\
[914]2244        Rotate the phase of the XY correlation.  This is always done in situ
2245        in the data.  So if you call this function more than once
2246        then each call rotates the phase further.
[1846]2247
[914]2248        Parameters:
[1846]2249
[914]2250            angle:   The angle (degrees) to rotate (add) by.
[1846]2251
2252        Example::
2253
[914]2254            scan.rotate_xyphase(2.3)
[1846]2255
[914]2256        """
2257        varlist = vars()
[936]2258        self._math._rotate_xyphase(self, angle)
[914]2259        self._add_history("rotate_xyphase", varlist)
2260        return
2261
[1862]2262    @asaplog_post_dec
[914]2263    def swap_linears(self):
[1846]2264        """\
[1573]2265        Swap the linear polarisations XX and YY, or more generally the first
[1348]2266        two polarisations, as this also works for circulars.
[914]2267        """
2268        varlist = vars()
[936]2269        self._math._swap_linears(self)
[914]2270        self._add_history("swap_linears", varlist)
2271        return
2272
[1862]2273    @asaplog_post_dec
[914]2274    def invert_phase(self):
[1846]2275        """\
[914]2276        Invert the phase of the complex polarisation
2277        """
2278        varlist = vars()
[936]2279        self._math._invert_phase(self)
[914]2280        self._add_history("invert_phase", varlist)
2281        return
2282
[1862]2283    @asaplog_post_dec
[876]2284    def add(self, offset, insitu=None):
[1846]2285        """\
[513]2286        Return a scan where all spectra have the offset added
[1846]2287
[513]2288        Parameters:
[1846]2289
[513]2290            offset:      the offset
[1855]2291
[513]2292            insitu:      if False a new scantable is returned.
2293                         Otherwise, the scaling is done in-situ
2294                         The default is taken from .asaprc (False)
[1846]2295
[513]2296        """
2297        if insitu is None: insitu = rcParams['insitu']
[876]2298        self._math._setinsitu(insitu)
[513]2299        varlist = vars()
[876]2300        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2301        s._add_history("add", varlist)
[876]2302        if insitu:
2303            self._assign(s)
2304        else:
[513]2305            return s
2306
[1862]2307    @asaplog_post_dec
[1308]2308    def scale(self, factor, tsys=True, insitu=None):
[1846]2309        """\
2310
[1938]2311        Return a scan where all spectra are scaled by the given 'factor'.
[1846]2312
[513]2313        Parameters:
[1846]2314
[1819]2315            factor:      the scaling factor (float or 1D float list)
[1855]2316
[513]2317            insitu:      if False a new scantable is returned.
2318                         Otherwise, the scaling is done in-situ
2319                         The default is taken from .asaprc (False)
[1855]2320
[513]2321            tsys:        if True (default) then apply the operation to Tsys
2322                         as well as the data
[1846]2323
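        Example (a minimal sketch; the factor is illustrative)::

            # scale the spectra (and Tsys) by 1.3 and return a new scantable
            scaled = scan.scale(1.3, tsys=True, insitu=False)
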
[513]2324        """
2325        if insitu is None: insitu = rcParams['insitu']
[876]2326        self._math._setinsitu(insitu)
[513]2327        varlist = vars()
[1819]2328        s = None
2329        import numpy
2330        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2331            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2332                from asapmath import _array2dOp
2333                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2334            else:
2335                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2336        else:
2337            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2338        s._add_history("scale", varlist)
[876]2339        if insitu:
2340            self._assign(s)
2341        else:
[513]2342            return s
2343
[1504]2344    def set_sourcetype(self, match, matchtype="pattern",
2345                       sourcetype="reference"):
[1846]2346        """\
[1502]2347        Set the type of the source to be a source or reference scan
[1846]2348        using the provided pattern.
2349
[1502]2350        Parameters:
[1846]2351
[1504]2352            match:          a Unix style pattern, regular expression or selector
[1855]2353
[1504]2354            matchtype:      'pattern' (default) UNIX style pattern or
2355                            'regex' regular expression
[1855]2356
[1502]2357            sourcetype:     the type of the source to use (source/reference)
[1846]2358
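        Example (a minimal sketch; the pattern is illustrative)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype(match='*_R', matchtype='pattern',
                                sourcetype='reference')
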
[1502]2359        """
2360        varlist = vars()
2361        basesel = self.get_selection()
2362        stype = -1
2363        if sourcetype.lower().startswith("r"):
2364            stype = 1
2365        elif sourcetype.lower().startswith("s"):
2366            stype = 0
[1504]2367        else:
[1502]2368            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2369        if matchtype.lower().startswith("p"):
2370            matchtype = "pattern"
2371        elif matchtype.lower().startswith("r"):
2372            matchtype = "regex"
2373        else:
2374            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2375        sel = selector()
2376        if isinstance(match, selector):
2377            sel = match
2378        else:
[1504]2379            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2380        self.set_selection(basesel+sel)
2381        self._setsourcetype(stype)
2382        self.set_selection(basesel)
[1573]2383        self._add_history("set_sourcetype", varlist)
[1502]2384
[1862]2385    @asaplog_post_dec
[1857]2386    @preserve_selection
[1819]2387    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2388        """\
[670]2389        This function allows you to build quotients automatically.
[1819]2390        It assumes the observation has the same number of
[670]2391        "ons" and "offs".
[1846]2392
[670]2393        Parameters:
[1846]2394
[710]2395            preserve:       you can preserve (default) the continuum or
2396                            remove it.  The equations used are
[1857]2397
[670]2398                            preserve: Output = Toff * (on/off) - Toff
[1857]2399
[1070]2400                            remove:   Output = Toff * (on/off) - Ton
[1855]2401
[1573]2402            mode:           the on/off detection mode
[1348]2403                            'paired' (default)
2404                            identifies 'off' scans by the
2405                            trailing '_R' (Mopra/Parkes) or
2406                            '_e'/'_w' (Tid) and matches
2407                            on/off pairs from the observing pattern
[1502]2408                            'time'
2409                            finds the closest off in time
[1348]2410
[1857]2411        .. todo:: verify argument is not implemented
2412
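        Example (a minimal sketch)::

            # build quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
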
[670]2413        """
[1857]2414        varlist = vars()
[1348]2415        modes = ["time", "paired"]
[670]2416        if not mode in modes:
[876]2417            msg = "please provide valid mode. Valid modes are %s" % (modes)
2418            raise ValueError(msg)
[1348]2419        s = None
2420        if mode.lower() == "paired":
[1857]2421            sel = self.get_selection()
[1875]2422            sel.set_query("SRCTYPE==psoff")
[1356]2423            self.set_selection(sel)
[1348]2424            offs = self.copy()
[1875]2425            sel.set_query("SRCTYPE==pson")
[1356]2426            self.set_selection(sel)
[1348]2427            ons = self.copy()
2428            s = scantable(self._math._quotient(ons, offs, preserve))
2429        elif mode.lower() == "time":
2430            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2431        s._add_history("auto_quotient", varlist)
[876]2432        return s
[710]2433
[1862]2434    @asaplog_post_dec
[1145]2435    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2436        """\
[1143]2437        Form a quotient using "off" beams when observing in "MX" mode.
[1846]2438
[1143]2439        Parameters:
[1846]2440
[1145]2441            mask:           an optional mask to be used when weight == 'stddev'
[1855]2442
[1143]2443            weight:         How to average the off beams.  Default is 'median'.
[1855]2444
[1145]2445            preserve:       you can preserve (default) the continuum or
[1855]2446                            remove it.  The equations used are:
[1846]2447
[1855]2448                                preserve: Output = Toff * (on/off) - Toff
2449
2450                                remove:   Output = Toff * (on/off) - Ton
2451
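        Example (a minimal sketch)::

            # form the quotient, median-averaging the off beams
            q = scan.mx_quotient(weight='median', preserve=True)
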
[1217]2452        """
[1593]2453        mask = mask or ()
[1141]2454        varlist = vars()
2455        on = scantable(self._math._mx_extract(self, 'on'))
[1143]2456        preoff = scantable(self._math._mx_extract(self, 'off'))
2457        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2458        from asapmath  import quotient
[1145]2459        q = quotient(on, off, preserve)
[1143]2460        q._add_history("mx_quotient", varlist)
[1217]2461        return q
[513]2462
[1862]2463    @asaplog_post_dec
[718]2464    def freq_switch(self, insitu=None):
[1846]2465        """\
[718]2466        Apply frequency switching to the data.
[1846]2467
[718]2468        Parameters:
[1846]2469
[718]2470            insitu:      if False a new scantable is returned.
2471                         Otherwise, the switching is done in-situ
2472                         The default is taken from .asaprc (False)
[1846]2473
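        Example (a minimal sketch)::

            # apply frequency switching and return a new scantable
            fswitched = scan.freq_switch(insitu=False)
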
[718]2474        """
2475        if insitu is None: insitu = rcParams['insitu']
[876]2476        self._math._setinsitu(insitu)
[718]2477        varlist = vars()
[876]2478        s = scantable(self._math._freqswitch(self))
[1118]2479        s._add_history("freq_switch", varlist)
[1856]2480        if insitu:
2481            self._assign(s)
2482        else:
2483            return s
[718]2484
[1862]2485    @asaplog_post_dec
[780]2486    def recalc_azel(self):
[1846]2487        """Recalculate the azimuth and elevation for each position."""
[780]2488        varlist = vars()
[876]2489        self._recalcazel()
[780]2490        self._add_history("recalc_azel", varlist)
2491        return
2492
[1862]2493    @asaplog_post_dec
[513]2494    def __add__(self, other):
2495        varlist = vars()
2496        s = None
2497        if isinstance(other, scantable):
[1573]2498            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2499        elif isinstance(other, float):
[876]2500            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2501        else:
[718]2502            raise TypeError("Other input is not a scantable or float value")
[513]2503        s._add_history("operator +", varlist)
2504        return s
2505
[1862]2506    @asaplog_post_dec
[513]2507    def __sub__(self, other):
2508        """
2509        implicit on all axes and on Tsys
2510        """
2511        varlist = vars()
2512        s = None
2513        if isinstance(other, scantable):
[1588]2514            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2515        elif isinstance(other, float):
[876]2516            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2517        else:
[718]2518            raise TypeError("Other input is not a scantable or float value")
[513]2519        s._add_history("operator -", varlist)
2520        return s
[710]2521
[1862]2522    @asaplog_post_dec
[513]2523    def __mul__(self, other):
2524        """
2525        implicit on all axes and on Tsys
2526        """
2527        varlist = vars()
2528        s = None
2529        if isinstance(other, scantable):
[1588]2530            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2531        elif isinstance(other, float):
[876]2532            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2533        else:
[718]2534            raise TypeError("Other input is not a scantable or float value")
[513]2535        s._add_history("operator *", varlist)
2536        return s
2537
[710]2538
[1862]2539    @asaplog_post_dec
[513]2540    def __div__(self, other):
2541        """
2542        implicit on all axes and on Tsys
2543        """
2544        varlist = vars()
2545        s = None
2546        if isinstance(other, scantable):
[1589]2547            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2548        elif isinstance(other, float):
2549            if other == 0.0:
[718]2550                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2551            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2552        else:
[718]2553            raise TypeError("Other input is not a scantable or float value")
[513]2554        s._add_history("operator /", varlist)
2555        return s
2556
[1862]2557    @asaplog_post_dec
[530]2558    def get_fit(self, row=0):
[1846]2559        """\
[530]2560        Print or return the stored fits for a row in the scantable
[1846]2561
[530]2562        Parameters:
[1846]2563
[530]2564            row:    the row which the fit has been applied to.
[1846]2565
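        Example (a minimal sketch)::

            # retrieve the stored fit for the first row as a dictionary
            fitinfo = scan.get_fit(row=0)
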
[530]2566        """
2567        if row > self.nrow():
2568            return
[976]2569        from asap.asapfit import asapfit
[530]2570        fit = asapfit(self._getfit(row))
[1859]2571        asaplog.push( '%s' %(fit) )
2572        return fit.as_dict()
[530]2573
[1483]2574    def flag_nans(self):
[1846]2575        """\
[1483]2576        Utility function to flag NaN values in the scantable.
2577        """
2578        import numpy
2579        basesel = self.get_selection()
2580        for i in range(self.nrow()):
[1589]2581            sel = self.get_row_selector(i)
2582            self.set_selection(basesel+sel)
[1483]2583            nans = numpy.isnan(self._getspectrum(0))
2584            if numpy.any(nans):
2585                bnans = [ bool(v) for v in nans]
2586                self.flag(bnans)
2587        self.set_selection(basesel)
2588
[1588]2589    def get_row_selector(self, rowno):
2590        return selector(beams=self.getbeam(rowno),
2591                        ifs=self.getif(rowno),
2592                        pols=self.getpol(rowno),
2593                        scans=self.getscan(rowno),
2594                        cycles=self.getcycle(rowno))
[1573]2595
[484]2596    def _add_history(self, funcname, parameters):
[1435]2597        if not rcParams['scantable.history']:
2598            return
[484]2599        # create date
2600        sep = "##"
2601        from datetime import datetime
2602        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2603        hist = dstr+sep
2604        hist += funcname+sep#cdate+sep
2605        if parameters.has_key('self'): del parameters['self']
[1118]2606        for k, v in parameters.iteritems():
[484]2607            if type(v) is dict:
[1118]2608                for k2, v2 in v.iteritems():
[484]2609                    hist += k2
2610                    hist += "="
[1118]2611                    if isinstance(v2, scantable):
[484]2612                        hist += 'scantable'
2613                    elif k2 == 'mask':
[1118]2614                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]2615                            hist += str(self._zip_mask(v2))
2616                        else:
2617                            hist += str(v2)
[484]2618                    else:
[513]2619                        hist += str(v2)
[484]2620            else:
2621                hist += k
2622                hist += "="
[1118]2623                if isinstance(v, scantable):
[484]2624                    hist += 'scantable'
2625                elif k == 'mask':
[1118]2626                    if isinstance(v, list) or isinstance(v, tuple):
[513]2627                        hist += str(self._zip_mask(v))
2628                    else:
2629                        hist += str(v)
[484]2630                else:
2631                    hist += str(v)
2632            hist += sep
2633        hist = hist[:-2] # remove trailing '##'
2634        self._addhistory(hist)
2635
[710]2636
[484]2637    def _zip_mask(self, mask):
2638        mask = list(mask)
2639        i = 0
2640        segments = []
2641        while mask[i:].count(1):
2642            i += mask[i:].index(1)
2643            if mask[i:].count(0):
2644                j = i + mask[i:].index(0)
2645            else:
[710]2646                j = len(mask)
[1118]2647            segments.append([i, j])
[710]2648            i = j
[484]2649        return segments
[714]2650
[626]2651    def _get_ordinate_label(self):
2652        fu = "("+self.get_fluxunit()+")"
2653        import re
2654        lbl = "Intensity"
[1118]2655        if re.match(".K.", fu):
[626]2656            lbl = "Brightness Temperature "+ fu
[1118]2657        elif re.match(".Jy.", fu):
[626]2658            lbl = "Flux density "+ fu
2659        return lbl
[710]2660
[876]2661    def _check_ifs(self):
2662        nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2663        nchans = filter(lambda t: t > 0, nchans)
[876]2664        return (sum(nchans)/len(nchans) == nchans[0])
[976]2665
[1862]2666    @asaplog_post_dec
[1916]2667    #def _fill(self, names, unit, average, getpt, antenna):
2668    def _fill(self, names, unit, average, opts={}):
[976]2669        first = True
2670        fullnames = []
2671        for name in names:
2672            name = os.path.expandvars(name)
2673            name = os.path.expanduser(name)
2674            if not os.path.exists(name):
2675                msg = "File '%s' does not exist" % (name)
2676                raise IOError(msg)
2677            fullnames.append(name)
2678        if average:
2679            asaplog.push('Auto averaging integrations')
[1079]2680        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2681        for name in fullnames:
[1073]2682            tbl = Scantable(stype)
[1843]2683            r = filler(tbl)
[1504]2684            rx = rcParams['scantable.reference']
[1843]2685            r.setreferenceexpr(rx)
[976]2686            msg = "Importing %s..." % (name)
[1118]2687            asaplog.push(msg, False)
[1916]2688            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2689            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2690            r.fill()
[976]2691            if average:
[1118]2692                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2693            if not first:
2694                tbl = self._math._merge([self, tbl])
2695            Scantable.__init__(self, tbl)
[1843]2696            r.close()
[1118]2697            del r, tbl
[976]2698            first = False
[1861]2699            #flush log
2700        asaplog.post()
[976]2701        if unit is not None:
2702            self.set_fluxunit(unit)
[1824]2703        if not is_casapy():
2704            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2705
[1402]2706    def __getitem__(self, key):
2707        if key < 0:
2708            key += self.nrow()
2709        if key >= self.nrow():
2710            raise IndexError("Row index out of range.")
2711        return self._getspectrum(key)
2712
2713    def __setitem__(self, key, value):
2714        if key < 0:
2715            key += self.nrow()
2716        if key >= self.nrow():
2717            raise IndexError("Row index out of range.")
2718        if not hasattr(value, "__len__") or \
2719                len(value) > self.nchan(self.getif(key)):
2720            raise ValueError("Spectrum length doesn't match.")
2721        return self._setspectrum(value, key)
2722
2723    def __len__(self):
2724        return self.nrow()
2725
2726    def __iter__(self):
2727        for i in range(len(self)):
2728            yield self[i]