source: trunk/python/scantable.py @ 1994

Last change on this file since 1994 was 1994, checked in by Kana Sugimoto, 13 years ago

New Development: Yes

JIRA Issue: Yes (CAS-1306)

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: added row selection parameters to scantable.flag, ScantableWrapper::flag and Scantable::flag.

Test Programs:

Put in Release Notes: No

Module(s): scantable class

Description:

Enabled a row selection in scantable.flag.

  • A parameter 'row' is added to scantable.flag. The default value -1 applies the specified channel flags to all rows in the scantable (same as the previous behaviour); see the usage sketch below.

  • A parameter 'whichrow' is also added to ScantableWrapper::flag and Scantable::flag accordingly.

  • The actual flag application part of Scantable::flag is moved to a new private function
Scantable::applyChanFlag(uInt whichrow, vector<bool> msk, uChar flagval). The function applies
a flag with the value 'flagval' to the masked channels 'msk' in the selected row 'whichrow'.
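
A minimal usage sketch of the row selection from the Python side (the file name and the mask range are placeholders for illustration, not taken from this change):

    from asap import scantable

    scan = scantable('orion.asap', average=True)
    msk = scan.create_mask([0, 100])
    # default row=-1: apply the channel flags to all rows (previous behaviour)
    scan.flag(mask=msk)
    # new: flag the same channels in row 3 only
    scan.flag(row=3, mask=msk)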


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.5 KB
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[1843]12from asap._asap import filler
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
22def preserve_selection(func):
[1691]23    @wraps_dec(func)
[1689]24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
[1857]26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
[1689]30        return val
31    return wrap
32
[1846]33def is_scantable(filename):
34    """Is the given file a scantable?
[1689]35
[1846]36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
[1883]41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
[1697]57
[1883]58def is_ms(filename):
59    """Is the given file a MeasurementSet?
[1697]60
[1883]61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
78   
[876]79class scantable(Scantable):
[1846]80    """\
81        The ASAP container for scans (single-dish data).
[102]82    """
[1819]83
[1862]84    @asaplog_post_dec
[1916]85    #def __init__(self, filename, average=None, unit=None, getpt=None,
86    #             antenna=None, parallactify=None):
87    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]88        """\
[102]89        Create a scantable from a saved one or make a reference
[1846]90
[102]91        Parameters:
[1846]92
93            filename:     the name of an asap table on disk
94                          or
95                          the name of a rpfits/sdfits/ms file
96                          (integrations within scans are auto averaged
97                          and the whole file is read) or
98                          [advanced] a reference to an existing scantable
99
100            average:      average all integrations withinb a scan on read.
101                          The default (True) is taken from .asaprc.
102
[484]103            unit:         brightness unit; must be consistent with K or Jy.
[1846]104                          Overrides the default selected by the filler
105                          (input rpfits/sdfits/ms) or replaces the value
106                          in existing scantables
107
108            getpt:        for MeasurementSet input data only:
109                          If True, all pointing data are filled.
110                          The default is False, which makes loading
111                          the MS data faster in some cases.
112
[1920]113            antenna:      for MeasurementSet input data only:
114                          Antenna selection. integer (id) or string (name or id).
[1846]115
116            parallactify: Indicate that the data had been parallatified. Default
117                          is taken from rc file.
118
[710]119        """
[976]120        if average is None:
[710]121            average = rcParams['scantable.autoaverage']
[1916]122        #if getpt is None:
123        #    getpt = True
124        #if antenna is not None:
125        #    asaplog.push("Antenna selection currently unsupported."
126        #                 "Using ''")
127        #    asaplog.post('WARN')
128        #if antenna is None:
129        #    antenna = ''
130        #elif type(antenna) == int:
131        #    antenna = '%s' % antenna
132        #elif type(antenna) == list:
133        #    tmpstr = ''
134        #    for i in range( len(antenna) ):
135        #        if type(antenna[i]) == int:
136        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
137        #        elif type(antenna[i]) == str:
138        #            tmpstr=tmpstr+antenna[i]+','
139        #        else:
140        #            raise TypeError('Bad antenna selection.')
141        #    antenna = tmpstr.rstrip(',')
[1593]142        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]143        varlist = vars()
[876]144        from asap._asap import stmath
[1819]145        self._math = stmath( rcParams['insitu'] )
[876]146        if isinstance(filename, Scantable):
147            Scantable.__init__(self, filename)
[181]148        else:
[1697]149            if isinstance(filename, str):
[976]150                filename = os.path.expandvars(filename)
151                filename = os.path.expanduser(filename)
152                if not os.path.exists(filename):
153                    s = "File '%s' not found." % (filename)
154                    raise IOError(s)
[1697]155                if is_scantable(filename):
156                    ondisk = rcParams['scantable.storage'] == 'disk'
157                    Scantable.__init__(self, filename, ondisk)
158                    if unit is not None:
159                        self.set_fluxunit(unit)
[1819]160                    # do not reset to the default freqframe
161                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]162                #elif os.path.isdir(filename) \
163                #         and not os.path.exists(filename+'/table.f1'):
164                elif is_ms(filename):
[1916]165                    # Measurement Set
166                    opts={'ms': {}}
167                    mskeys=['getpt','antenna']
168                    for key in mskeys:
169                        if key in args.keys():
170                            opts['ms'][key] = args[key]
171                    #self._fill([filename], unit, average, getpt, antenna)
172                    self._fill([filename], unit, average, opts)
[1893]173                elif os.path.isfile(filename):
[1916]174                    #self._fill([filename], unit, average, getpt, antenna)
175                    self._fill([filename], unit, average)
[1883]176                else:
[1819]177                    msg = "The given file '%s' is not a valid " \
178                          "asap table." % (filename)
[1859]179                    raise IOError(msg)
[1118]180            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]181                  and isinstance(filename[-1], str):
[1916]182                #self._fill(filename, unit, average, getpt, antenna)
183                self._fill(filename, unit, average)
[1586]184        self.parallactify(parallactify)
[1259]185        self._add_history("scantable", varlist)
[102]186
[1862]187    @asaplog_post_dec
[876]188    def save(self, name=None, format=None, overwrite=False):
[1846]189        """\
[1280]190        Store the scantable on disk. This can be an asap (aips++) Table,
191        SDFITS or MS2 format.
[1846]192
[116]193        Parameters:
[1846]194
[1093]195            name:        the name of the output file. For format "ASCII"
196                         this is the root file name (data in 'name'.txt
[497]197                         and header in 'name'_header.txt)
[1855]198
[116]199            format:      an optional file format. Default is ASAP.
[1855]200                         Allowed are:
201
202                            * 'ASAP' (save as ASAP [aips++] Table),
203                            * 'SDFITS' (save as SDFITS file)
204                            * 'ASCII' (saves as ascii text file)
205                            * 'MS2' (saves as a casacore MeasurementSet V2)
206                            * 'FITS' (save as image FITS - not readable by class)
207                            * 'CLASS' (save as FITS readable by CLASS)
208
[411]209            overwrite:   Whether the file should be overwritten if it exists.
[256]210                         The default (False) is to return with a warning
[411]211                         without writing the output. USE WITH CARE.
[1855]212
[1846]213        Example::
214
[116]215            scan.save('myscan.asap')
[1118]216            scan.save('myscan.sdfits', 'SDFITS')
[1846]217
[116]218        """
[411]219        from os import path
[1593]220        format = format or rcParams['scantable.save']
[256]221        suffix = '.'+format.lower()
[1118]222        if name is None or name == "":
[256]223            name = 'scantable'+suffix
[718]224            msg = "No filename given. Using default name %s..." % name
225            asaplog.push(msg)
[411]226        name = path.expandvars(name)
[256]227        if path.isfile(name) or path.isdir(name):
228            if not overwrite:
[718]229                msg = "File %s exists." % name
[1859]230                raise IOError(msg)
[451]231        format2 = format.upper()
232        if format2 == 'ASAP':
[116]233            self._save(name)
234        else:
[989]235            from asap._asap import stwriter as stw
[1118]236            writer = stw(format2)
237            writer.write(self, name)
[116]238        return
239
[102]240    def copy(self):
[1846]241        """Return a copy of this scantable.
242
243        *Note*:
244
[1348]245            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]246
247        Example::
248
[102]249            copiedscan = scan.copy()
[1846]250
[102]251        """
[876]252        sd = scantable(Scantable._copy(self))
[113]253        return sd
254
[1093]255    def drop_scan(self, scanid=None):
[1846]256        """\
[1093]257        Return a new scantable where the specified scan number(s) has(have)
258        been dropped.
[1846]259
[1093]260        Parameters:
[1846]261
[1093]262            scanid:    a (list of) scan number(s)
[1846]263
[1093]264        """
265        from asap import _is_sequence_or_number as _is_valid
266        from asap import _to_list
267        from asap import unique
268        if not _is_valid(scanid):
[1859]269            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
270        scanid = _to_list(scanid)
271        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
272        for sid in scanid: allscans.remove(sid)
273        if len(allscans) == 0:
274            raise ValueError("Can't remove all scans")
275        sel = selector(scans=allscans)
276        return self._select_copy(sel)
[1093]277
[1594]278    def _select_copy(self, selection):
279        orig = self.get_selection()
280        self.set_selection(orig+selection)
281        cp = self.copy()
282        self.set_selection(orig)
283        return cp
284
[102]285    def get_scan(self, scanid=None):
[1855]286        """\
[102]287        Return a specific scan (by scanno) or collection of scans (by
288        source name) in a new scantable.
[1846]289
290        *Note*:
291
[1348]292            See scantable.drop_scan() for the inverse operation.
[1846]293
[102]294        Parameters:
[1846]295
[513]296            scanid:    a (list of) scanno or a source name, unix-style
297                       patterns are accepted for source name matching, e.g.
298                       '*_R' gets all 'ref scans
[1846]299
300        Example::
301
[513]302            # get all scans containing the source '323p459'
303            newscan = scan.get_scan('323p459')
304            # get all 'off' scans
305            refscans = scan.get_scan('*_R')
306            # get a subset of scans by scanno (as listed in scan.summary())
[1118]307            newscan = scan.get_scan([0, 2, 7, 10])
[1846]308
[102]309        """
310        if scanid is None:
[1859]311            raise RuntimeError( 'Please specify a scan no or name to '
312                                'retrieve from the scantable' )
[102]313        try:
[946]314            bsel = self.get_selection()
315            sel = selector()
[102]316            if type(scanid) is str:
[946]317                sel.set_name(scanid)
[1594]318                return self._select_copy(sel)
[102]319            elif type(scanid) is int:
[946]320                sel.set_scans([scanid])
[1594]321                return self._select_copy(sel)
[381]322            elif type(scanid) is list:
[946]323                sel.set_scans(scanid)
[1594]324                return self._select_copy(sel)
[381]325            else:
[718]326                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]327                raise TypeError(msg)
[102]328        except RuntimeError:
[1859]329            raise
[102]330
331    def __str__(self):
[1118]332        return Scantable._summary(self, True)
[102]333
[976]334    def summary(self, filename=None):
[1846]335        """\
[102]336        Print a summary of the contents of this scantable.
[1846]337
[102]338        Parameters:
[1846]339
[1931]340            filename:    the name of a file to write the output to
[102]341                         Default - no file output
[1846]342
[102]343        """
[976]344        info = Scantable._summary(self, True)
[102]345        if filename is not None:
[256]346            if filename is "":
347                filename = 'scantable_summary.txt'
[415]348            from os.path import expandvars, isdir
[411]349            filename = expandvars(filename)
[415]350            if not isdir(filename):
[413]351                data = open(filename, 'w')
352                data.write(info)
353                data.close()
354            else:
[718]355                msg = "Illegal file name '%s'." % (filename)
[1859]356                raise IOError(msg)
357        return page(info)
[710]358
[1512]359    def get_spectrum(self, rowno):
[1471]360        """Return the spectrum for the given row in the scantable as a list.
[1846]361
[1471]362        Parameters:
[1846]363
[1573]364             rowno:   the row number to retrieve the spectrum from
[1846]365
[1471]366        """
367        return self._getspectrum(rowno)
[946]368
[1471]369    def get_mask(self, rowno):
370        """Return the mask for the given row in the scantable as a list.
[1846]371
[1471]372        Parameters:
[1846]373
[1573]374             rowno:   the row number to retrieve the mask from
[1846]375
[1471]376        """
377        return self._getmask(rowno)
378
379    def set_spectrum(self, spec, rowno):
[1938]380        """Set the spectrum for the given row in the scantable.
[1846]381
[1471]382        Parameters:
[1846]383
[1855]384             spec:   the new spectrum
[1846]385
[1855]386             rowno:  the row number to set the spectrum for
387
[1471]388        """
389        assert(len(spec) == self.nchan())
390        return self._setspectrum(spec, rowno)
391
[1600]392    def get_coordinate(self, rowno):
393        """Return the (spectral) coordinate for a given 'rowno'.
[1846]394
395        *Note*:
396
[1600]397            * This coordinate is only valid until a scantable method modifies
398              the frequency axis.
399            * This coordinate contains the original frequency set-up,
400              NOT the new frame. The conversions, however, are done using the
401              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
402              use scantable.freq_align first. Without it there is no closure,
[1846]403              i.e.::
[1600]404
[1846]405                  c = myscan.get_coordinate(0)
406                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
407
[1600]408        Parameters:
[1846]409
[1600]410             rowno:    the row number for the spectral coordinate
411
412        """
413        return coordinate(Scantable.get_coordinate(self, rowno))
414
[946]415    def get_selection(self):
[1846]416        """\
[1005]417        Get the selection object currently set on this scantable.
[1846]418
419        Example::
420
[1005]421            sel = scan.get_selection()
422            sel.set_ifs(0)              # select IF 0
423            scan.set_selection(sel)     # apply modified selection
[1846]424
[946]425        """
426        return selector(self._getselection())
427
[1576]428    def set_selection(self, selection=None, **kw):
[1846]429        """\
[1005]430        Select a subset of the data. All following operations on this scantable
431        are only applied to this selection.
[1846]432
[1005]433        Parameters:
[1697]434
[1846]435            selection:    a selector object (default unset the selection), or
436                          any combination of "pols", "ifs", "beams", "scans",
437                          "cycles", "name", "query"
[1697]438
[1846]439        Examples::
[1697]440
[1005]441            sel = selector()         # create a selection object
[1118]442            sel.set_scans([0, 3])     # select SCANNO 0 and 3
[1005]443            scan.set_selection(sel)  # set the selection
444            scan.summary()           # will only print summary of scanno 0 and 3
445            scan.set_selection()     # unset the selection
[1697]446            # or the equivalent
447            scan.set_selection(scans=[0,3])
448            scan.summary()           # will only print summary of scanno 0 and 3
449            scan.set_selection()     # unset the selection
[1846]450
[946]451        """
[1576]452        if selection is None:
453            # reset
454            if len(kw) == 0:
455                selection = selector()
456            else:
457                # try keywords
458                for k in kw:
459                    if k not in selector.fields:
460                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
461                selection = selector(**kw)
[946]462        self._setselection(selection)
463
[1819]464    def get_row(self, row=0, insitu=None):
[1846]465        """\
[1819]466        Select a row in the scantable.
467        Return a scantable with a single row.
[1846]468
[1819]469        Parameters:
[1846]470
471            row:    row no of integration, default is 0.
472            insitu: if False a new scantable is returned. Otherwise, the
473                    selection is done in-situ. The default is taken from .asaprc
474                    (False)
475
[1819]476        """
477        if insitu is None: insitu = rcParams['insitu']
478        if not insitu:
479            workscan = self.copy()
480        else:
481            workscan = self
482        # Select a row
483        sel=selector()
[1992]484        sel.set_rows([row])
485        #sel.set_scans([workscan.getscan(row)])
486        #sel.set_cycles([workscan.getcycle(row)])
487        #sel.set_beams([workscan.getbeam(row)])
488        #sel.set_ifs([workscan.getif(row)])
489        #sel.set_polarisations([workscan.getpol(row)])
490        #sel.set_name(workscan._getsourcename(row))
[1819]491        workscan.set_selection(sel)
492        if not workscan.nrow() == 1:
493            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
494            raise RuntimeError(msg)
495        del sel
496        if insitu:
497            self._assign(workscan)
498        else:
499            return workscan
500
[1862]501    @asaplog_post_dec
[1907]502    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]503        """\
[135]504        Determine the specified statistic of the current beam/if/pol
[102]505        Takes a 'mask' as an optional parameter to specify which
506        channels should be excluded.
[1846]507
[102]508        Parameters:
[1846]509
[1819]510            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
511                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]512
[135]513            mask:    an optional mask specifying where the statistic
[102]514                     should be determined.
[1855]515
[1819]516            form:    format string to print statistic values
[1846]517
[1907]518            row:     row number of spectrum to process.
519                     (default is None: for all rows)
[1846]520
[1907]521        Example::
[113]522            scan.set_unit('channel')
[1118]523            msk = scan.create_mask([100, 200], [500, 600])
[135]524            scan.stats(stat='mean', mask=msk)
[1846]525
[102]526        """
[1593]527        mask = mask or []
[876]528        if not self._check_ifs():
[1118]529            raise ValueError("Cannot apply mask as the IFs have different "
530                             "number of channels. Please use setselection() "
531                             "to select individual IFs")
[1819]532        rtnabc = False
533        if stat.lower().endswith('_abc'): rtnabc = True
534        getchan = False
535        if stat.lower().startswith('min') or stat.lower().startswith('max'):
536            chan = self._math._minmaxchan(self, mask, stat)
537            getchan = True
538            statvals = []
[1907]539        if not rtnabc:
540            if row == None:
541                statvals = self._math._stats(self, mask, stat)
542            else:
543                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]544
[1819]545        #def cb(i):
546        #    return statvals[i]
[256]547
[1819]548        #return self._row_callback(cb, stat)
[102]549
[1819]550        label=stat
551        #callback=cb
552        out = ""
553        #outvec = []
554        sep = '-'*50
[1907]555
556        if row == None:
557            rows = xrange(self.nrow())
558        elif isinstance(row, int):
559            rows = [ row ]
560
561        for i in rows:
[1819]562            refstr = ''
563            statunit= ''
564            if getchan:
565                qx, qy = self.chan2data(rowno=i, chan=chan[i])
566                if rtnabc:
567                    statvals.append(qx['value'])
568                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
569                    statunit= '['+qx['unit']+']'
570                else:
571                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
572
573            tm = self._gettime(i)
574            src = self._getsourcename(i)
575            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
576            out += 'Time[%s]:\n' % (tm)
[1907]577            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
578            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
579            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]580            #outvec.append(callback(i))
[1907]581            if len(rows) > 1:
582                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
583                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
584            else:
585                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
586                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]587            out +=  sep+"\n"
588
[1859]589        import os
590        if os.environ.has_key( 'USER' ):
591            usr = os.environ['USER']
592        else:
593            import commands
594            usr = commands.getoutput( 'whoami' )
595        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
596        f = open(tmpfile,'w')
597        print >> f, sep
598        print >> f, ' %s %s' % (label, statunit)
599        print >> f, sep
600        print >> f, out
601        f.close()
602        f = open(tmpfile,'r')
603        x = f.readlines()
604        f.close()
605        asaplog.push(''.join(x), False)
606
[1819]607        return statvals
608
609    def chan2data(self, rowno=0, chan=0):
[1846]610        """\
[1819]611        Returns channel/frequency/velocity and spectral value
612        at an arbitrary row and channel in the scantable.
[1846]613
[1819]614        Parameters:
[1846]615
[1819]616            rowno:   a row number in the scantable. Default is the
617                     first row, i.e. rowno=0
[1855]618
[1819]619            chan:    a channel in the scantable. Default is the first
620                     channel, i.e. pos=0
[1846]621
[1819]622        """
623        if isinstance(rowno, int) and isinstance(chan, int):
624            qx = {'unit': self.get_unit(),
625                  'value': self._getabcissa(rowno)[chan]}
626            qy = {'unit': self.get_fluxunit(),
627                  'value': self._getspectrum(rowno)[chan]}
628            return qx, qy
629
[1118]630    def stddev(self, mask=None):
[1846]631        """\
[135]632        Determine the standard deviation of the current beam/if/pol
633        Takes a 'mask' as an optional parameter to specify which
634        channels should be excluded.
[1846]635
[135]636        Parameters:
[1846]637
[135]638            mask:    an optional mask specifying where the standard
639                     deviation should be determined.
640
[1846]641        Example::
642
[135]643            scan.set_unit('channel')
[1118]644            msk = scan.create_mask([100, 200], [500, 600])
[135]645            scan.stddev(mask=msk)
[1846]646
[135]647        """
[1118]648        return self.stats(stat='stddev', mask=mask);
[135]649
[1003]650
[1259]651    def get_column_names(self):
[1846]652        """\
[1003]653        Return a  list of column names, which can be used for selection.
654        """
[1259]655        return list(Scantable.get_column_names(self))
[1003]656
[1730]657    def get_tsys(self, row=-1):
[1846]658        """\
[113]659        Return the System temperatures.
[1846]660
661        Parameters:
662
663            row:    the rowno to get the information for. (default all rows)
664
[113]665        Returns:
[1846]666
[876]667            a list of Tsys values for the current selection
[1846]668
[113]669        """
[1730]670        if row > -1:
671            return self._get_column(self._gettsys, row)
[876]672        return self._row_callback(self._gettsys, "Tsys")
[256]673
[1730]674
675    def get_weather(self, row=-1):
[1846]676        """\
677        Return the weather informations.
678
679        Parameters:
680
681            row:    the rowno to get the information for. (default all rows)
682
683        Returns:
684
685            a dict or list of dicts of values for the current selection
686
687        """
688
[1730]689        values = self._get_column(self._get_weather, row)
690        if row > -1:
691            return {'temperature': values[0],
692                    'pressure': values[1], 'humidity' : values[2],
693                    'windspeed' : values[3], 'windaz' : values[4]
694                    }
695        else:
696            out = []
697            for r in values:
698
699                out.append({'temperature': r[0],
700                            'pressure': r[1], 'humidity' : r[2],
701                            'windspeed' : r[3], 'windaz' : r[4]
702                    })
703            return out
704
[876]705    def _row_callback(self, callback, label):
706        out = ""
[1118]707        outvec = []
[1590]708        sep = '-'*50
[876]709        for i in range(self.nrow()):
710            tm = self._gettime(i)
711            src = self._getsourcename(i)
[1590]712            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]713            out += 'Time[%s]:\n' % (tm)
[1590]714            if self.nbeam(-1) > 1:
715                out +=  ' Beam[%d] ' % (self.getbeam(i))
716            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
717            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]718            outvec.append(callback(i))
719            out += '= %3.3f\n' % (outvec[i])
[1590]720            out +=  sep+'\n'
[1859]721
722        asaplog.push(sep)
723        asaplog.push(" %s" % (label))
724        asaplog.push(sep)
725        asaplog.push(out)
[1861]726        asaplog.post()
[1175]727        return outvec
[256]728
[1947]729    def _get_column(self, callback, row=-1, *args):
[1070]730        """
731        """
732        if row == -1:
[1947]733            return [callback(i, *args) for i in range(self.nrow())]
[1070]734        else:
[1819]735            if  0 <= row < self.nrow():
[1947]736                return callback(row, *args)
[256]737
[1070]738
[1948]739    def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]740        """\
[113]741        Get a list of time stamps for the observations.
[1938]742        Return a datetime object or a string (default) for each
743        integration time stamp in the scantable.
[1846]744
[113]745        Parameters:
[1846]746
[1348]747            row:          row no of integration. Default -1 return all rows
[1855]748
[1348]749            asdatetime:   return values as datetime objects rather than strings
[1846]750
[1948]751            prec:         number of digits shown. Default -1 means automatic calculation.
752                          Note this number equals the digits of MVTime,
753                          i.e., 0 < prec < 3: dates with hh:: only,
754                          < 5: with hh:mm:, < 7 or 0: with hh:mm:ss,
[1947]755                          and > 6: with hh:mm:ss.tt... (prec-6 t's added)
756
[113]757        """
[1175]758        from datetime import datetime
[1948]759        if prec < 0:
760            # automagically set necessary precision +1
[1950]761            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]762            prec = max(6, int(prec))
763        else:
764            prec = max(0, prec)
765        if asdatetime:
766            #precision can be 1 millisecond at max
767            prec = min(12, prec)
768
[1947]769        times = self._get_column(self._gettime, row, prec)
[1348]770        if not asdatetime:
[1392]771            return times
[1947]772        format = "%Y/%m/%d/%H:%M:%S.%f"
773        if prec < 7:
774            nsub = 1 + (((6-prec)/2) % 3)
775            substr = [".%f","%S","%M"]
776            for i in range(nsub):
777                format = format.replace(substr[i],"")
[1175]778        if isinstance(times, list):
[1947]779            return [datetime.strptime(i, format) for i in times]
[1175]780        else:
[1947]781            return datetime.strptime(times, format)
[102]782
[1348]783
784    def get_inttime(self, row=-1):
[1846]785        """\
[1348]786        Get a list of integration times for the observations.
787        Return a time in seconds for each integration in the scantable.
[1846]788
[1348]789        Parameters:
[1846]790
[1348]791            row:    row no of integration. Default -1 return all rows.
[1846]792
[1348]793        """
[1573]794        return self._get_column(self._getinttime, row)
[1348]795
[1573]796
[714]797    def get_sourcename(self, row=-1):
[1846]798        """\
[794]799        Get a list of source names for the observations.
[714]800        Return a string for each integration in the scantable.
801        Parameters:
[1846]802
[1348]803            row:    row no of integration. Default -1 return all rows.
[1846]804
[714]805        """
[1070]806        return self._get_column(self._getsourcename, row)
[714]807
[794]808    def get_elevation(self, row=-1):
[1846]809        """\
[794]810        Get a list of elevations for the observations.
811        Return a float for each integration in the scantable.
[1846]812
[794]813        Parameters:
[1846]814
[1348]815            row:    row no of integration. Default -1 return all rows.
[1846]816
[794]817        """
[1070]818        return self._get_column(self._getelevation, row)
[794]819
820    def get_azimuth(self, row=-1):
[1846]821        """\
[794]822        Get a list of azimuths for the observations.
823        Return a float for each integration in the scantable.
[1846]824
[794]825        Parameters:
[1348]826            row:    row no of integration. Default -1 return all rows.
[1846]827
[794]828        """
[1070]829        return self._get_column(self._getazimuth, row)
[794]830
831    def get_parangle(self, row=-1):
[1846]832        """\
[794]833        Get a list of parallactic angles for the observations.
834        Return a float for each integration in the scantable.
[1846]835
[794]836        Parameters:
[1846]837
[1348]838            row:    row no of integration. Default -1 return all rows.
[1846]839
[794]840        """
[1070]841        return self._get_column(self._getparangle, row)
[794]842
[1070]843    def get_direction(self, row=-1):
844        """
845        Get a list of Positions on the sky (direction) for the observations.
[1594]846        Return a string for each integration in the scantable.
[1855]847
[1070]848        Parameters:
[1855]849
[1070]850            row:    row no of integration. Default -1 return all rows
[1855]851
[1070]852        """
853        return self._get_column(self._getdirection, row)
854
[1391]855    def get_directionval(self, row=-1):
[1846]856        """\
[1391]857        Get a list of Positions on the sky (direction) for the observations.
858        Return a float for each integration in the scantable.
[1846]859
[1391]860        Parameters:
[1846]861
[1391]862            row:    row no of integration. Default -1 return all rows
[1846]863
[1391]864        """
865        return self._get_column(self._getdirectionvec, row)
866
[1862]867    @asaplog_post_dec
[102]868    def set_unit(self, unit='channel'):
[1846]869        """\
[102]870        Set the unit for all following operations on this scantable
[1846]871
[102]872        Parameters:
[1846]873
874            unit:    optional unit, default is 'channel'. Use one of '*Hz',
875                     'km/s', 'channel' or equivalent ''
876
[102]877        """
[484]878        varlist = vars()
[1118]879        if unit in ['', 'pixel', 'channel']:
[113]880            unit = ''
881        inf = list(self._getcoordinfo())
882        inf[0] = unit
883        self._setcoordinfo(inf)
[1118]884        self._add_history("set_unit", varlist)
[113]885
[1862]886    @asaplog_post_dec
[484]887    def set_instrument(self, instr):
[1846]888        """\
[1348]889        Set the instrument for subsequent processing.
[1846]890
[358]891        Parameters:
[1846]892
[710]893            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]894                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]895
[358]896        """
897        self._setInstrument(instr)
[1118]898        self._add_history("set_instrument", vars())
[358]899
[1862]900    @asaplog_post_dec
[1190]901    def set_feedtype(self, feedtype):
[1846]902        """\
[1190]903        Overwrite the feed type, which might not be set correctly.
[1846]904
[1190]905        Parameters:
[1846]906
[1190]907            feedtype:     'linear' or 'circular'
[1846]908
[1190]909        """
910        self._setfeedtype(feedtype)
911        self._add_history("set_feedtype", vars())
912
[1862]913    @asaplog_post_dec
[276]914    def set_doppler(self, doppler='RADIO'):
[1846]915        """\
[276]916        Set the doppler for all following operations on this scantable.
[1846]917
[276]918        Parameters:
[1846]919
[276]920            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]921
[276]922        """
[484]923        varlist = vars()
[276]924        inf = list(self._getcoordinfo())
925        inf[2] = doppler
926        self._setcoordinfo(inf)
[1118]927        self._add_history("set_doppler", vars())
[710]928
[1862]929    @asaplog_post_dec
[226]930    def set_freqframe(self, frame=None):
[1846]931        """\
[113]932        Set the frame type of the Spectral Axis.
[1846]933
[113]934        Parameters:
[1846]935
[591]936            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]937                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]938                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]939
940        Example::
941
[113]942            scan.set_freqframe('BARY')
[1846]943
[113]944        """
[1593]945        frame = frame or rcParams['scantable.freqframe']
[484]946        varlist = vars()
[1819]947        # "REST" is not implemented in casacore
948        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
949        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
950        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]951                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]952
[989]953        if frame in valid:
[113]954            inf = list(self._getcoordinfo())
955            inf[1] = frame
956            self._setcoordinfo(inf)
[1118]957            self._add_history("set_freqframe", varlist)
[102]958        else:
[1118]959            msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
[1859]960            raise TypeError(msg)
[710]961
[1862]962    @asaplog_post_dec
[989]963    def set_dirframe(self, frame=""):
[1846]964        """\
[989]965        Set the frame type of the Direction on the sky.
[1846]966
[989]967        Parameters:
[1846]968
[989]969            frame:   an optional frame type, default ''. Valid frames are:
970                     'J2000', 'B1950', 'GALACTIC'
[1846]971
972        Example::
973
[989]974            scan.set_dirframe('GALACTIC')
[1846]975
[989]976        """
977        varlist = vars()
[1859]978        Scantable.set_dirframe(self, frame)
[1118]979        self._add_history("set_dirframe", varlist)
[989]980
[113]981    def get_unit(self):
[1846]982        """\
[113]983        Get the default unit set in this scantable
[1846]984
[113]985        Returns:
[1846]986
[113]987            A unit string
[1846]988
[113]989        """
990        inf = self._getcoordinfo()
991        unit = inf[0]
992        if unit == '': unit = 'channel'
993        return unit
[102]994
[1862]995    @asaplog_post_dec
[158]996    def get_abcissa(self, rowno=0):
[1846]997        """\
[158]998        Get the abcissa in the current coordinate setup for the currently
[113]999        selected Beam/IF/Pol
[1846]1000
[113]1001        Parameters:
[1846]1002
[226]1003            rowno:    an optional row number in the scantable. Default is the
1004                      first row, i.e. rowno=0
[1846]1005
[113]1006        Returns:
[1846]1007
[1348]1008            The abcissa values and the format string (as a dictionary)
[1846]1009
[113]1010        """
[256]1011        abc = self._getabcissa(rowno)
[710]1012        lbl = self._getabcissalabel(rowno)
[158]1013        return abc, lbl
[113]1014
[1862]1015    @asaplog_post_dec
[1994]1016    def flag(self, row=-1, mask=None, unflag=False):
[1846]1017        """\
[1001]1018        Flag the selected data using an optional channel mask.
[1846]1019
[1001]1020        Parameters:
[1846]1021
[1994]1022            row:    an optional row number in the scantable.
1023                      Default -1 flags all rows
1024                     
[1001]1025            mask:   an optional channel mask, created with create_mask. Default
1026                    (no mask) is all channels.
[1855]1027
[1819]1028            unflag:    if True, unflag the data
[1846]1029
[1001]1030        """
1031        varlist = vars()
[1593]1032        mask = mask or []
[1994]1033        self._flag(row, mask, unflag)
[1001]1034        self._add_history("flag", varlist)
1035
[1862]1036    @asaplog_post_dec
[1819]1037    def flag_row(self, rows=[], unflag=False):
[1846]1038        """\
[1819]1039        Flag the selected data in a row-based manner.
[1846]1040
[1819]1041        Parameters:
[1846]1042
[1843]1043            rows:   list of row numbers to be flagged. Default is no row
1044                    (must be explicitly specified to execute row-based flagging).
[1855]1045
[1819]1046            unflag: if True, unflag the data.
[1846]1047
[1819]1048        """
1049        varlist = vars()
[1859]1050        self._flag_row(rows, unflag)
[1819]1051        self._add_history("flag_row", varlist)
1052
[1862]1053    @asaplog_post_dec
[1819]1054    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1055        """\
[1819]1056        Flag the selected data outside a specified range (channel-based)
[1846]1057
[1819]1058        Parameters:
[1846]1059
[1819]1060            uthres:      upper threshold.
[1855]1061
[1819]1062            dthres:      lower threshold
[1846]1063
[1819]1064            clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1065                         False for flagging data inside the range.
[1855]1066
[1846]1067            unflag:      if True, unflag the data.
1068
[1819]1069        """
1070        varlist = vars()
[1859]1071        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1072        self._add_history("clip", varlist)
1073
[1862]1074    @asaplog_post_dec
[1584]1075    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1076        """\
[1192]1077        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1078        Flagged data in the scantable gets interpolated over the region.
[1192]1079        No taper is applied.
[1846]1080
[1192]1081        Parameters:
[1846]1082
[1579]1083            start:    the start frequency (really a period within the
1084                      bandwidth)  or period to remove
[1855]1085
[1579]1086            end:      the end frequency or period to remove
[1855]1087
[1584]1088            unit:     the frequency unit (default "MHz") or "" for
[1579]1089                      explicit lag channels
[1846]1090
1091        *Notes*:
1092
[1579]1093            It is recommended to flag edges of the band or strong
[1348]1094            signals beforehand.
[1846]1095
[1192]1096        """
1097        if insitu is None: insitu = rcParams['insitu']
1098        self._math._setinsitu(insitu)
1099        varlist = vars()
[1579]1100        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1101        if not (unit == "" or base.has_key(unit)):
[1192]1102            raise ValueError("%s is not a valid unit." % unit)
[1859]1103        if unit == "":
1104            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1105        else:
1106            s = scantable(self._math._lag_flag(self, start*base[unit],
1107                                               end*base[unit], "frequency"))
[1192]1108        s._add_history("lag_flag", varlist)
1109        if insitu:
1110            self._assign(s)
1111        else:
1112            return s
[1001]1113
[1862]1114    @asaplog_post_dec
[113]1115    def create_mask(self, *args, **kwargs):
[1846]1116        """\
[1118]1117        Compute and return a mask based on [min, max] windows.
[189]1118        The specified windows are to be INCLUDED, when the mask is
[113]1119        applied.
[1846]1120
[102]1121        Parameters:
[1846]1122
[1118]1123            [min, max], [min2, max2], ...
[1024]1124                Pairs of start/end points (inclusive) specifying the regions
[102]1125                to be masked
[1855]1126
[189]1127            invert:     optional argument. If specified as True,
1128                        return an inverted mask, i.e. the regions
1129                        specified are EXCLUDED
[1855]1130
[513]1131            row:        create the mask using the specified row for
1132                        unit conversions, default is row=0
1133                        only necessary if frequency varies over rows.
[1846]1134
1135        Examples::
1136
[113]1137            scan.set_unit('channel')
[1846]1138            # a)
[1118]1139            msk = scan.create_mask([400, 500], [800, 900])
[189]1140            # masks everything outside 400 and 500
[113]1141            # and 800 and 900 in the unit 'channel'
1142
[1846]1143            # b)
[1118]1144            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1145            # masks the regions between 400 and 500
[113]1146            # and 800 and 900 in the unit 'channel'
[1846]1147
1148            # c)
1149            #mask only channel 400
[1554]1150            msk =  scan.create_mask([400])
[1846]1151
[102]1152        """
[1554]1153        row = kwargs.get("row", 0)
[513]1154        data = self._getabcissa(row)
[113]1155        u = self._getcoordinfo()[0]
[1859]1156        if u == "":
1157            u = "channel"
1158        msg = "The current mask window unit is %s" % u
1159        i = self._check_ifs()
1160        if not i:
1161            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1162        asaplog.push(msg)
[102]1163        n = self.nchan()
[1295]1164        msk = _n_bools(n, False)
[710]1165        # test if args is a 'list' or a 'normal *args - UGLY!!!
1166
[1118]1167        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1168             and args or args[0]
[710]1169        for window in ws:
[1554]1170            if len(window) == 1:
1171                window = [window[0], window[0]]
1172            if len(window) == 0 or  len(window) > 2:
1173                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1174            if window[0] > window[1]:
1175                tmp = window[0]
1176                window[0] = window[1]
1177                window[1] = tmp
[102]1178            for i in range(n):
[1024]1179                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1180                    msk[i] = True
[113]1181        if kwargs.has_key('invert'):
1182            if kwargs.get('invert'):
[1295]1183                msk = mask_not(msk)
[102]1184        return msk
[710]1185
[1931]1186    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1187        """\
[1819]1188        Compute and return a list of mask windows, [min, max].
[1846]1189
[1819]1190        Parameters:
[1846]1191
[1819]1192            mask:       channel mask, created with create_mask.
[1855]1193
[1819]1194            row:        calculate the masklist using the specified row
1195                        for unit conversions, default is row=0
1196                        only necessary if frequency varies over rows.
[1846]1197
[1819]1198        Returns:
[1846]1199
[1819]1200            [min, max], [min2, max2], ...
1201                Pairs of start/end points (inclusive) specifying
1202                the masked regions
[1846]1203
[1819]1204        """
1205        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1206            raise TypeError("The mask should be list or tuple.")
1207        if len(mask) < 2:
1208            raise TypeError("The mask elements should be > 1")
1209        if self.nchan() != len(mask):
1210            msg = "Number of channels in scantable != number of mask elements"
1211            raise TypeError(msg)
1212        data = self._getabcissa(row)
1213        u = self._getcoordinfo()[0]
[1859]1214        if u == "":
1215            u = "channel"
1216        msg = "The current mask window unit is %s" % u
1217        i = self._check_ifs()
1218        if not i:
1219            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1220        if not silent:
1221            asaplog.push(msg)
[1819]1222        masklist=[]
1223        ist, ien = None, None
1224        ist, ien=self.get_mask_indices(mask)
1225        if ist is not None and ien is not None:
1226            for i in xrange(len(ist)):
1227                range=[data[ist[i]],data[ien[i]]]
1228                range.sort()
1229                masklist.append([range[0],range[1]])
1230        return masklist
1231
1232    def get_mask_indices(self, mask=None):
[1846]1233        """\
[1819]1234        Compute and return lists of mask start indices and mask end indices.
[1855]1235
1236        Parameters:
1237
[1819]1238            mask:       channel mask, created with create_mask.
[1846]1239
[1819]1240        Returns:
[1846]1241
[1819]1242            List of mask start indices and that of mask end indices,
1243            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1244
[1819]1245        """
1246        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1247            raise TypeError("The mask should be list or tuple.")
1248        if len(mask) < 2:
1249            raise TypeError("The mask elements should be > 1")
1250        istart=[]
1251        iend=[]
1252        if mask[0]: istart.append(0)
1253        for i in range(len(mask)-1):
1254            if not mask[i] and mask[i+1]:
1255                istart.append(i+1)
1256            elif mask[i] and not mask[i+1]:
1257                iend.append(i)
1258        if mask[len(mask)-1]: iend.append(len(mask)-1)
1259        if len(istart) != len(iend):
1260            raise RuntimeError("Numbers of mask start != mask end.")
1261        for i in range(len(istart)):
1262            if istart[i] > iend[i]:
1263                raise RuntimeError("Mask start index > mask end index")
1264                break
1265        return istart,iend
1266
1267#    def get_restfreqs(self):
1268#        """
1269#        Get the restfrequency(s) stored in this scantable.
1270#        The return value(s) are always of unit 'Hz'
1271#        Parameters:
1272#            none
1273#        Returns:
1274#            a list of doubles
1275#        """
1276#        return list(self._getrestfreqs())
1277
1278    def get_restfreqs(self, ids=None):
[1846]1279        """\
[256]1280        Get the restfrequency(s) stored in this scantable.
1281        The return value(s) are always of unit 'Hz'
[1846]1282
[256]1283        Parameters:
[1846]1284
[1819]1285            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1286                 be retrieved
[1846]1287
[256]1288        Returns:
[1846]1289
[1819]1290            dictionary containing ids and a list of doubles for each id
[1846]1291
[256]1292        """
[1819]1293        if ids is None:
1294            rfreqs={}
1295            idlist = self.getmolnos()
1296            for i in idlist:
1297                rfreqs[i]=list(self._getrestfreqs(i))
1298            return rfreqs
1299        else:
1300            if type(ids)==list or type(ids)==tuple:
1301                rfreqs={}
1302                for i in ids:
1303                    rfreqs[i]=list(self._getrestfreqs(i))
1304                return rfreqs
1305            else:
1306                return list(self._getrestfreqs(ids))
1307            #return list(self._getrestfreqs(ids))
[102]1308
[931]1309    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1310        """\
[931]1311        Set or replace the restfrequency specified.
[1938]1312        If the 'freqs' argument holds a scalar,
[931]1313        then that rest frequency will be applied to all the selected
1314        data.  If the 'freqs' argument holds
1315        a vector, then it MUST be of equal or smaller length than
1316        the number of IFs (and the available restfrequencies will be
1317        replaced by this vector).  In this case, *all* data have
1318        the restfrequency set per IF according
1319        to the corresponding value you give in the 'freqs' vector.
[1118]1320        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1321        IF 1 gets restfreq 2e9.
[1846]1322
[1395]1323        You can also specify the frequencies via a linecatalog.
[1153]1324
[931]1325        Parameters:
[1846]1326
[931]1327            freqs:   list of rest frequency values or string identifiers
[1855]1328
[931]1329            unit:    unit for rest frequency (default 'Hz')
[402]1330
[1846]1331
1332        Example::
1333
[1819]1334            # set the given restfrequency for the all currently selected IFs
[931]1335            scan.set_restfreqs(freqs=1.4e9)
[1845]1336            # set restfrequencies for the n IFs  (n > 1) in the order of the
1337            # list, i.e
1338            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1339            # len(list_of_restfreqs) == nIF
1340            # for nIF == 1 the following will set multiple restfrequency for
1341            # that IF
[1819]1342            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1343            # set multiple restfrequencies per IF, as a list of lists where
1344            # the outer list has nIF elements, the inner is arbitrary
1345            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1346
[1846]1347       *Note*:
[1845]1348
[931]1349            To do more sophisticated rest frequency setting, e.g. on a
1350            source and IF basis, use scantable.set_selection() before using
[1846]1351            this function::
[931]1352
[1846]1353                # provided your scantable is called scan
1354                selection = selector()
1355                selection.set_name("ORION*")
1356                selection.set_ifs([1])
1357                scan.set_selection(selection)
1358                scan.set_restfreqs(freqs=86.6e9)
1359
[931]1360        """
1361        varlist = vars()
[1157]1362        from asap import linecatalog
1363        # simple  value
[1118]1364        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1365            self._setrestfreqs([freqs], [""], unit)
[1157]1366        # list of values
[1118]1367        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1368            # list values are scalars
[1118]1369            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1370                if len(freqs) == 1:
1371                    self._setrestfreqs(freqs, [""], unit)
1372                else:
1373                    # allow the 'old' mode of setting multiple IFs
1374                    sel = selector()
1375                    savesel = self._getselection()
1376                    iflist = self.getifnos()
1377                    if len(freqs)>len(iflist):
1378                        raise ValueError("number of elements in list of list "
1379                                         "exceeds the current IF selections")
1380                    iflist = self.getifnos()
1381                    for i, fval in enumerate(freqs):
1382                        sel.set_ifs(iflist[i])
1383                        self._setselection(sel)
1384                        self._setrestfreqs([fval], [""], unit)
1385                    self._setselection(savesel)
1386
1387            # list values are dicts, {'value': ..., 'name': ...}
[1157]1388            elif isinstance(freqs[-1], dict):
[1845]1389                values = []
1390                names = []
1391                for d in freqs:
1392                    values.append(d["value"])
1393                    names.append(d["name"])
1394                self._setrestfreqs(values, names, unit)
[1819]1395            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1396                sel = selector()
1397                savesel = self._getselection()
[1322]1398                iflist = self.getifnos()
[1819]1399                if len(freqs) > len(iflist):
[1845]1400                    raise ValueError("number of elements in list of lists "
1401                                     "exceeds the current IF selections")
1402                for i, fval in enumerate(freqs):
[1322]1403                    sel.set_ifs(iflist[i])
[1259]1404                    self._setselection(sel)
[1845]1405                    self._setrestfreqs(fval, [""], unit)
[1157]1406                self._setselection(savesel)
1407        # freqs are to be taken from a linecatalog
[1153]1408        elif isinstance(freqs, linecatalog):
1409            sel = selector()
1410            savesel = self._getselection()
            # map catalogue rows onto the currently selected IFs
            iflist = self.getifnos()
1411            for i in xrange(freqs.nrow()):
[1322]1412                sel.set_ifs(iflist[i])
[1153]1413                self._setselection(sel)
[1845]1414                self._setrestfreqs([freqs.get_frequency(i)],
1415                                   [freqs.get_name(i)], "MHz")
[1153]1416                # ensure that we are not iterating past nIF
1417                if i == self.nif()-1: break
1418            self._setselection(savesel)
[931]1419        else:
1420            return
1421        self._add_history("set_restfreqs", varlist)
1422
[1360]1423    def shift_refpix(self, delta):
[1846]1424        """\
[1589]1425        Shift the reference pixel of the spectral coordinate by an
1426        integer amount.
[1846]1427
[1589]1428        Parameters:
[1846]1429
[1589]1430            delta:   the amount to shift by
[1846]1431
1432        *Note*:
1433
[1589]1434            Be careful using this with broadband data.
[1846]1435
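        Example (a minimal sketch; the shift value is illustrative and
        'scan' is assumed to be an existing scantable)::

            # shift the reference pixel by 10 channels
            scan.shift_refpix(10)
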
[1360]1436        """
[1731]1437        Scantable.shift_refpix(self, delta)
[931]1438
[1862]1439    @asaplog_post_dec
[1259]1440    def history(self, filename=None):
[1846]1441        """\
[1259]1442        Print the history. Optionally to a file.
[1846]1443
[1348]1444        Parameters:
[1846]1445
[1928]1446            filename:    The name of the file to save the history to.
[1846]1447
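        Example (the output file name is illustrative only)::

            # print the history to the screen
            scan.history()
            # or write it to a text file
            scan.history(filename='scantable_history.txt')
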
[1259]1448        """
[484]1449        hist = list(self._gethistory())
[794]1450        out = "-"*80
[484]1451        for h in hist:
[489]1452            if h.startswith("---"):
[1857]1453                out = "\n".join([out, h])
[489]1454            else:
1455                items = h.split("##")
1456                date = items[0]
1457                func = items[1]
1458                items = items[2:]
[794]1459                out += "\n"+date+"\n"
1460                out += "Function: %s\n  Parameters:" % (func)
[489]1461                for i in items:
[1938]1462                    if i == '':
1463                        continue
[489]1464                    s = i.split("=")
[1118]1465                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1466                out = "\n".join([out, "-"*80])
[1259]1467        if filename is not None:
1468            if filename == "":
1469                filename = 'scantable_history.txt'
1470            import os
1471            filename = os.path.expandvars(os.path.expanduser(filename))
1472            if not os.path.isdir(filename):
1473                data = open(filename, 'w')
1474                data.write(out)
1475                data.close()
1476            else:
1477                msg = "Illegal file name '%s'." % (filename)
[1859]1478                raise IOError(msg)
1479        return page(out)
[513]1480    #
1481    # Maths business
1482    #
[1862]1483    @asaplog_post_dec
[931]1484    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1485        """\
[1070]1486        Return the (time) weighted average of a scan.
[1846]1487
1488        *Note*:
1489
[1070]1490            in channels only - align if necessary
[1846]1491
[513]1492        Parameters:
[1846]1493
[513]1494            mask:     an optional mask (only used for 'var' and 'tsys'
1495                      weighting)
[1855]1496
[558]1497            scanav:   True averages each scan separately
1498                      False (default) averages all scans together,
[1855]1499
[1099]1500            weight:   Weighting scheme.
1501                      'none'     (mean, no weighting)
1502                      'var'      (1/var(spec) weighted)
1503                      'tsys'     (1/Tsys**2 weighted)
1504                      'tint'     (integration time weighted)
1505                      'tintsys'  (Tint/Tsys**2 weighted)
1506                      'median'   (median averaging)
[535]1507                      The default is 'tint'
[1855]1508
[931]1509            align:    align the spectra in velocity before averaging. It takes
1510                      the time of the first spectrum as reference time.
[1846]1511
1512        Example::
1513
[513]1514            # time average the scantable without using a mask
[710]1515            newscan = scan.average_time()
[1846]1516
[513]1517        """
1518        varlist = vars()
[1593]1519        weight = weight or 'TINT'
1520        mask = mask or ()
1521        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1522        scan = (self, )
[1859]1523
1524        if align:
1525            scan = (self.freq_align(insitu=False), )
1526        s = None
1527        if weight.upper() == 'MEDIAN':
1528            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1529                                                     scanav))
1530        else:
1531            s = scantable(self._math._average(scan, mask, weight.upper(),
1532                          scanav))
[1099]1533        s._add_history("average_time", varlist)
[513]1534        return s
[710]1535
[1862]1536    @asaplog_post_dec
[876]1537    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1538        """\
[513]1539        Return a scan where all spectra are converted to either
1540        Jansky or Kelvin depending upon the flux units of the scan table.
1541        By default the function tries to look the values up internally.
1542        If it can't find them (or if you want to override them), you must
1543        specify EITHER jyperk OR eta (and D, which it will also try to look
1544        up if you don't set it). jyperk takes precedence if you set both.
[1846]1545
[513]1546        Parameters:
[1846]1547
[513]1548            jyperk:      the Jy / K conversion factor
[1855]1549
[513]1550            eta:         the aperture efficiency
[1855]1551
[1928]1552            d:           the geometric diameter (metres)
[1855]1553
[513]1554            insitu:      if False a new scantable is returned.
1555                         Otherwise, the scaling is done in-situ
1556                         The default is taken from .asaprc (False)
[1846]1557
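        Example (a minimal sketch; the Jy/K factor below is illustrative
        and 'scan' is assumed to be an existing scantable)::

            # convert using an explicit Jy/K factor
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
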
[513]1558        """
1559        if insitu is None: insitu = rcParams['insitu']
[876]1560        self._math._setinsitu(insitu)
[513]1561        varlist = vars()
[1593]1562        jyperk = jyperk or -1.0
1563        d = d or -1.0
1564        eta = eta or -1.0
[876]1565        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1566        s._add_history("convert_flux", varlist)
1567        if insitu: self._assign(s)
1568        else: return s
[513]1569
[1862]1570    @asaplog_post_dec
[876]1571    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1572        """\
[513]1573        Return a scan after applying a gain-elevation correction.
1574        The correction can be made via either a polynomial or a
1575        table-based interpolation (and extrapolation if necessary).
1576        You specify polynomial coefficients, an ascii table or neither.
1577        If you specify neither, then a polynomial correction will be made
1578        with built in coefficients known for certain telescopes (an error
1579        will occur if the instrument is not known).
1580        The data and Tsys are *divided* by the scaling factors.
[1846]1581
[513]1582        Parameters:
[1846]1583
[513]1584            poly:        Polynomial coefficients (default None) to compute a
1585                         gain-elevation correction as a function of
1586                         elevation (in degrees).
[1855]1587
[513]1588            filename:    The name of an ascii file holding correction factors.
1589                         The first row of the ascii file must give the column
1590                         names and these MUST include columns
1591                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1592                         by this) somewhere.
1593                         The second row must give the data type of the
1594                         column. Use 'R' for Real and 'I' for Integer.
1595                         An example file would be
1596                         (actual factors are arbitrary) :
1597
1598                         TIME ELEVATION FACTOR
1599                         R R R
1600                         0.1 0 0.8
1601                         0.2 20 0.85
1602                         0.3 40 0.9
1603                         0.4 60 0.85
1604                         0.5 80 0.8
1605                         0.6 90 0.75
[1855]1606
[513]1607            method:      Interpolation method when correcting from a table.
1608                         Values are  "nearest", "linear" (default), "cubic"
1609                         and "spline"
[1855]1610
[513]1611            insitu:      if False a new scantable is returned.
1612                         Otherwise, the scaling is done in-situ
1613                         The default is taken from .asaprc (False)
[1846]1614
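        Example (a minimal sketch; the polynomial coefficients are purely
        illustrative)::

            # correct with an explicit polynomial in elevation (degrees)
            corrscan = scan.gain_el(poly=[0.5, 0.0125, -0.0001], insitu=False)
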
[513]1615        """
1616
1617        if insitu is None: insitu = rcParams['insitu']
[876]1618        self._math._setinsitu(insitu)
[513]1619        varlist = vars()
[1593]1620        poly = poly or ()
[513]1621        from os.path import expandvars
1622        filename = expandvars(filename)
[876]1623        s = scantable(self._math._gainel(self, poly, filename, method))
1624        s._add_history("gain_el", varlist)
[1593]1625        if insitu:
1626            self._assign(s)
1627        else:
1628            return s
[710]1629
[1862]1630    @asaplog_post_dec
[931]1631    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1632        """\
[513]1633        Return a scan where all rows have been aligned in frequency/velocity.
1634        The alignment frequency frame (e.g. LSRK) is that set by function
1635        set_freqframe.
[1846]1636
[513]1637        Parameters:
[1855]1638
[513]1639            reftime:     reference time to align at. By default, the time of
1640                         the first row of data is used.
[1855]1641
[513]1642            method:      Interpolation method for regridding the spectra.
1643                         Choose from "nearest", "linear", "cubic" (default)
1644                         and "spline"
[1855]1645
[513]1646            insitu:      if False a new scantable is returned.
1647                         Otherwise, the scaling is done in-situ
1648                         The default is taken from .asaprc (False)
[1846]1649
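        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # align all rows to the time of the first row using the default
            # cubic interpolation
            aligned = scan.freq_align(insitu=False)
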
[513]1650        """
[931]1651        if insitu is None: insitu = rcParams["insitu"]
[876]1652        self._math._setinsitu(insitu)
[513]1653        varlist = vars()
[1593]1654        reftime = reftime or ""
[931]1655        s = scantable(self._math._freq_align(self, reftime, method))
[876]1656        s._add_history("freq_align", varlist)
1657        if insitu: self._assign(s)
1658        else: return s
[513]1659
[1862]1660    @asaplog_post_dec
[1725]1661    def opacity(self, tau=None, insitu=None):
[1846]1662        """\
[513]1663        Apply an opacity correction. The data
1664        and Tsys are multiplied by the correction factor.
[1846]1665
[513]1666        Parameters:
[1855]1667
[1689]1668            tau:         (list of) opacity from which the correction factor is
[513]1669                         exp(tau*ZD)
[1689]1670                         where ZD is the zenith-distance.
1671                         If a list is provided, it has to be of length nIF,
1672                         nIF*nPol or 1 and in order of IF/POL, e.g.
1673                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1674                         if tau is `None` the opacities are determined from a
1675                         model.
[1855]1676
[513]1677            insitu:      if False a new scantable is returned.
1678                         Otherwise, the scaling is done in-situ
1679                         The default is taken from .asaprc (False)
[1846]1680
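        Example (the opacity value is illustrative only)::

            # apply a single zenith opacity to all IFs/polarisations
            corrected = scan.opacity(tau=0.09, insitu=False)
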
[513]1681        """
1682        if insitu is None: insitu = rcParams['insitu']
[876]1683        self._math._setinsitu(insitu)
[513]1684        varlist = vars()
[1689]1685        if not hasattr(tau, "__len__"):
1686            tau = [tau]
[876]1687        s = scantable(self._math._opacity(self, tau))
1688        s._add_history("opacity", varlist)
1689        if insitu: self._assign(s)
1690        else: return s
[513]1691
[1862]1692    @asaplog_post_dec
[513]1693    def bin(self, width=5, insitu=None):
[1846]1694        """\
[513]1695        Return a scan where all spectra have been binned up.
[1846]1696
[1348]1697        Parameters:
[1846]1698
[513]1699            width:       The bin width (default=5) in pixels
[1855]1700
[513]1701            insitu:      if False a new scantable is returned.
1702                         Otherwise, the scaling is done in-situ
1703                         The default is taken from .asaprc (False)
[1846]1704
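        Example (the bin width is illustrative)::

            # bin up the spectra by 10 channels
            binned = scan.bin(width=10, insitu=False)
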
[513]1705        """
1706        if insitu is None: insitu = rcParams['insitu']
[876]1707        self._math._setinsitu(insitu)
[513]1708        varlist = vars()
[876]1709        s = scantable(self._math._bin(self, width))
[1118]1710        s._add_history("bin", varlist)
[1589]1711        if insitu:
1712            self._assign(s)
1713        else:
1714            return s
[513]1715
[1862]1716    @asaplog_post_dec
[513]1717    def resample(self, width=5, method='cubic', insitu=None):
[1846]1718        """\
[1348]1719        Return a scan where all spectra have been resampled onto a new channel grid.
[1573]1720
[1348]1721        Parameters:
[1846]1722
[513]1723            width:       The bin width (default=5) in pixels
[1855]1724
[513]1725            method:      Interpolation method used for the resampling.
1726                         Values are  "nearest", "linear", "cubic" (default)
1727                         and "spline"
[1855]1728
[513]1729            insitu:      if False a new scantable is returned.
1730                         Otherwise, the scaling is done in-situ
1731                         The default is taken from .asaprc (False)
[1846]1732
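        Example (the width is illustrative)::

            # resample the spectra using spline interpolation
            rescan = scan.resample(width=2, method='spline', insitu=False)
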
[513]1733        """
1734        if insitu is None: insitu = rcParams['insitu']
[876]1735        self._math._setinsitu(insitu)
[513]1736        varlist = vars()
[876]1737        s = scantable(self._math._resample(self, method, width))
[1118]1738        s._add_history("resample", varlist)
[876]1739        if insitu: self._assign(s)
1740        else: return s
[513]1741
[1862]1742    @asaplog_post_dec
[946]1743    def average_pol(self, mask=None, weight='none'):
[1846]1744        """\
[946]1745        Average the Polarisations together.
[1846]1746
[946]1747        Parameters:
[1846]1748
[946]1749            mask:        An optional mask defining the region, where the
1750                         averaging will be applied. The output will have all
1751                         specified points masked.
[1855]1752
[946]1753            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1754                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1755
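        Example::

            # unweighted average of the polarisations
            # ('scan' is assumed to be an existing scantable)
            pave = scan.average_pol(weight='none')
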
[946]1756        """
1757        varlist = vars()
[1593]1758        mask = mask or ()
[1010]1759        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1760        s._add_history("average_pol", varlist)
[992]1761        return s
[513]1762
[1862]1763    @asaplog_post_dec
[1145]1764    def average_beam(self, mask=None, weight='none'):
[1846]1765        """\
[1145]1766        Average the Beams together.
[1846]1767
[1145]1768        Parameters:
1769            mask:        An optional mask defining the region, where the
1770                         averaging will be applied. The output will have all
1771                         specified points masked.
[1855]1772
[1145]1773            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1774                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1775
[1145]1776        """
1777        varlist = vars()
[1593]1778        mask = mask or ()
[1145]1779        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1780        s._add_history("average_beam", varlist)
1781        return s
1782
[1586]1783    def parallactify(self, pflag):
[1846]1784        """\
[1843]1785        Set a flag to indicate whether this data should be treated as having
[1617]1786        been 'parallactified' (total phase == 0.0)
[1846]1787
[1617]1788        Parameters:
[1855]1789
[1843]1790            pflag:  Bool indicating whether to turn this on (True) or
[1617]1791                    off (False)
[1846]1792
[1617]1793        """
[1586]1794        varlist = vars()
1795        self._parallactify(pflag)
1796        self._add_history("parallactify", varlist)
1797
[1862]1798    @asaplog_post_dec
[992]1799    def convert_pol(self, poltype=None):
[1846]1800        """\
[992]1801        Convert the data to a different polarisation type.
[1565]1802        Note that you will need cross-polarisation terms for most conversions.
[1846]1803
[992]1804        Parameters:
[1855]1805
[992]1806            poltype:    The new polarisation type. Valid types are:
[1565]1807                        "linear", "circular", "stokes" and "linpol"
[1846]1808
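        Example (a minimal sketch; requires cross-polarisation terms in the
        data)::

            # convert linears to Stokes parameters
            stokescan = scan.convert_pol("stokes")
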
[992]1809        """
1810        varlist = vars()
[1859]1811        s = scantable(self._math._convertpol(self, poltype))
[1118]1812        s._add_history("convert_pol", varlist)
[992]1813        return s
1814
[1862]1815    @asaplog_post_dec
[1819]1816    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]1817        """\
[513]1818        Smooth the spectrum by the specified kernel (conserving flux).
[1846]1819
[513]1820        Parameters:
[1846]1821
[513]1822            kernel:     The type of smoothing kernel. Select from
[1574]1823                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1824                        or 'poly'
[1855]1825
[513]1826            width:      The width of the kernel in pixels. For hanning this is
1827                        ignored otherwise it defauls to 5 pixels.
1828                        For 'gaussian' it is the Full Width Half
1829                        Maximum. For 'boxcar' it is the full width.
[1574]1830                        For 'rmedian' and 'poly' it is the half width.
[1855]1831
[1574]1832            order:      Optional parameter for 'poly' kernel (default is 2), to
1833                        specify the order of the polynomial. Ignored by all other
1834                        kernels.
[1855]1835
[1819]1836            plot:       plot the original and the smoothed spectra.
1837                        Each individual spectrum has to be approved by
1838                        typing 'y' or 'n'
[1855]1839
[513]1840            insitu:     if False a new scantable is returned.
1841                        Otherwise, the scaling is done in-situ
1842                        The default is taken from .asaprc (False)
[1846]1843
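        Example (the kernel width is illustrative)::

            # smooth with a gaussian of 10 channels FWHM
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
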
[513]1844        """
1845        if insitu is None: insitu = rcParams['insitu']
[876]1846        self._math._setinsitu(insitu)
[513]1847        varlist = vars()
[1819]1848
1849        if plot: orgscan = self.copy()
1850
[1574]1851        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1852        s._add_history("smooth", varlist)
[1819]1853
1854        if plot:
1855            if rcParams['plotter.gui']:
1856                from asap.asaplotgui import asaplotgui as asaplot
1857            else:
1858                from asap.asaplot import asaplot
1859            self._p=asaplot()
1860            self._p.set_panels()
1861            ylab=s._get_ordinate_label()
1862            #self._p.palette(0,["#777777","red"])
1863            for r in xrange(s.nrow()):
1864                xsm=s._getabcissa(r)
1865                ysm=s._getspectrum(r)
1866                xorg=orgscan._getabcissa(r)
1867                yorg=orgscan._getspectrum(r)
1868                self._p.clear()
1869                self._p.hold()
1870                self._p.set_axes('ylabel',ylab)
1871                self._p.set_axes('xlabel',s._getabcissalabel(r))
1872                self._p.set_axes('title',s._getsourcename(r))
1873                self._p.set_line(label='Original',color="#777777")
1874                self._p.plot(xorg,yorg)
1875                self._p.set_line(label='Smoothed',color="red")
1876                self._p.plot(xsm,ysm)
1877                ### Ugly part for legend
1878                for i in [0,1]:
1879                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1880                self._p.release()
1881                ### Ugly part for legend
1882                self._p.subplots[0]['lines']=[]
1883                res = raw_input("Accept smoothing ([y]/n): ")
1884                if res.upper() == 'N':
1885                    s._setspectrum(yorg, r)
1886            self._p.unmap()
1887            self._p = None
1888            del orgscan
1889
[876]1890        if insitu: self._assign(s)
1891        else: return s
[513]1892
[1862]1893    @asaplog_post_dec
[1907]1894    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
[1846]1895        """\
[513]1896        Return a scan which has been baselined (all rows) by a polynomial.
[1907]1897       
[513]1898        Parameters:
[1846]1899
[794]1900            mask:       an optional mask
[1855]1901
[794]1902            order:      the order of the polynomial (default is 0)
[1855]1903
[1061]1904            plot:       plot the fit and the residual. Each individual
1905                        fit has to be approved by typing 'y'
1906                        or 'n'
[1855]1907
[1391]1908            uselin:     use linear polynomial fit
[1855]1909
[794]1910            insitu:     if False a new scantable is returned.
1911                        Otherwise, the scaling is done in-situ
1912                        The default is taken from .asaprc (False)
[1846]1913
[1907]1914            rows:       row numbers of spectra to be processed.
1915                        (default is None: for all rows)
1916       
1917        Example::

[513]1918            # return a scan baselined by a third order polynomial,
1919            # not using a mask
1920            bscan = scan.poly_baseline(order=3)
[1846]1921
[579]1922        """
[513]1923        if insitu is None: insitu = rcParams['insitu']
[1819]1924        if not insitu:
1925            workscan = self.copy()
1926        else:
1927            workscan = self
[513]1928        varlist = vars()
1929        if mask is None:
[1907]1930            mask = [True for i in xrange(self.nchan())]
[1819]1931
[1217]1932        try:
1933            f = fitter()
[1391]1934            if uselin:
1935                f.set_function(lpoly=order)
1936            else:
1937                f.set_function(poly=order)
[1819]1938
[1907]1939            if rows == None:
1940                rows = xrange(workscan.nrow())
1941            elif isinstance(rows, int):
1942                rows = [ rows ]
1943           
[1819]1944            if len(rows) > 0:
1945                self.blpars = []
[1907]1946                self.masklists = []
1947                self.actualmask = []
1948           
[1819]1949            for r in rows:
1950                f.x = workscan._getabcissa(r)
1951                f.y = workscan._getspectrum(r)
[1907]1952                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]1953                f.data = None
1954                f.fit()
1955                if plot:
1956                    f.plot(residual=True)
1957                    x = raw_input("Accept fit ( [y]/n ): ")
1958                    if x.upper() == 'N':
1959                        self.blpars.append(None)
[1907]1960                        self.masklists.append(None)
1961                        self.actualmask.append(None)
[1819]1962                        continue
1963                workscan._setspectrum(f.fitter.getresidual(), r)
1964                self.blpars.append(f.get_parameters())
[1931]1965                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
[1907]1966                self.actualmask.append(f.mask)
[1819]1967
1968            if plot:
1969                f._p.unmap()
1970                f._p = None
1971            workscan._add_history("poly_baseline", varlist)
[1856]1972            if insitu:
1973                self._assign(workscan)
1974            else:
1975                return workscan
[1217]1976        except RuntimeError:
1977            msg = "The fit failed, possibly because it didn't converge."
[1859]1978            raise RuntimeError(msg)
[513]1979
[1931]1980    @asaplog_post_dec
[1907]1981    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1982        """\
1983        Return a scan which has been baselined (all rows) by a polynomial.

1984        Parameters:

1985            mask:       an optional mask

1986            order:      the order of the polynomial (default is 0)

1987            plot:       plot the fit and the residual. Each individual
1988                        fit has to be approved by typing 'y'
1989                        or 'n'. Ignored if batch = True.

1990            batch:      if True a faster algorithm is used and logs
1991                        including the fit results are not output
1992                        (default is False)

1993            insitu:     if False a new scantable is returned.
1994                        Otherwise, the scaling is done in-situ
1995                        The default is taken from .asaprc (False)

[1931]1996            rows:       row numbers of spectra to be baselined.
[1907]1997                        (default is None: for all rows)

1998        Example::

1999            # return a scan baselined by a third order polynomial,
2000            # not using a mask
2001            bscan = scan.poly_baseline(order=3)
2002        """
[1931]2003       
2004        varlist = vars()
2005       
[1907]2006        if insitu is None: insitu = rcParams["insitu"]
2007        if insitu:
2008            workscan = self
2009        else:
2010            workscan = self.copy()
2011
2012        nchan = workscan.nchan()
2013       
2014        if mask is None:
2015            mask = [True for i in xrange(nchan)]
2016
2017        try:
2018            if rows == None:
2019                rows = xrange(workscan.nrow())
2020            elif isinstance(rows, int):
2021                rows = [ rows ]
2022           
2023            if len(rows) > 0:
[1931]2024                workscan.blpars = []
2025                workscan.masklists = []
2026                workscan.actualmask = []
[1907]2027
2028            if batch:
[1931]2029                workscan._poly_baseline_batch(mask, order)
[1907]2030            elif plot:
2031                f = fitter()
2032                f.set_function(lpoly=order)
2033                for r in rows:
2034                    f.x = workscan._getabcissa(r)
2035                    f.y = workscan._getspectrum(r)
2036                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2037                    f.data = None
2038                    f.fit()
2039                   
2040                    f.plot(residual=True)
2041                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2042                    if accept_fit.upper() == "N":
2043                        self.blpars.append(None)
2044                        self.masklists.append(None)
2045                        self.actualmask.append(None)
2046                        continue
2047                    workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2048                    workscan.blpars.append(f.get_parameters())
2049                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2050                    workscan.actualmask.append(f.mask)
[1907]2051                   
2052                f._p.unmap()
2053                f._p = None
2054            else:
2055                for r in rows:
[1931]2056                    fitparams = workscan._poly_baseline(mask, order, r)
2057                    params = fitparams.getparameters()
2058                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2059                    errors = fitparams.geterrors()
2060                    fmask = mask_and(mask, workscan._getmask(r))
2061
2062                    workscan.blpars.append({"params":params,
2063                                            "fixed": fitparams.getfixedparameters(),
2064                                            "formatted":fmtd, "errors":errors})
2065                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2066                    workscan.actualmask.append(fmask)
[1907]2067                   
[1931]2068                    asaplog.push(fmtd)
[1907]2069           
2070            workscan._add_history("poly_baseline", varlist)
2071           
2072            if insitu:
2073                self._assign(workscan)
2074            else:
2075                return workscan
2076           
[1919]2077        except RuntimeError, e:
[1907]2078            msg = "The fit failed, possibly because it didn't converge."
2079            if rcParams["verbose"]:
[1919]2080                asaplog.push(str(e))
[1907]2081                asaplog.push(str(msg))
2082                return
2083            else:
[1919]2084                raise RuntimeError(str(e)+'\n'+msg)
[1907]2085
2086
2087    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2088                           threshold=3, chan_avg_limit=1, plot=False,
[1907]2089                           insitu=None, rows=None):
[1846]2090        """\
[1931]2091        Return a scan which has been baselined (all rows) by a polynomial.
[880]2092        Spectral lines are detected first using linefinder and masked out
2093        to avoid them affecting the baseline solution.
2094
2095        Parameters:
[1846]2096
[880]2097            mask:       an optional mask retrieved from scantable
[1846]2098
2099            edge:       an optional number of channels to drop at the edge of
2100                        the spectrum. If only one value is
[880]2101                        specified, the same number will be dropped from
2102                        both sides of the spectrum. Default is to keep
[907]2103                        all channels. Nested tuples represent individual
[976]2104                        edge selections for different IFs (the number of
2105                        spectral channels can differ between IFs)
[1846]2106
[880]2107            order:      the order of the polynomial (default is 0)
[1846]2108
[880]2109            threshold:  the threshold used by line finder. It is better to
2110                        keep it large as only strong lines affect the
2111                        baseline solution.
[1846]2112
[1280]2113            chan_avg_limit:
2114                        a maximum number of consecutive spectral channels to
2115                        average during the search of weak and broad lines.
2116                        The default is no averaging (and no search for weak
2117                        lines). If such lines can affect the fitted baseline
2118                        (e.g. a high order polynomial is fitted), increase this
2119                        parameter (usually values up to 8 are reasonable). Most
2120                        users of this method should find the default value
2121                        sufficient.
[1846]2122
[1061]2123            plot:       plot the fit and the residual. Each individual
2124                        fit has to be approved by typing 'y'
2125                        or 'n'
[1846]2126
[880]2127            insitu:     if False a new scantable is returned.
2128                        Otherwise, the scaling is done in-situ
2129                        The default is taken from .asaprc (False)
[1907]2130            rows:       row numbers of spectra to be processed.
2131                        (default is None: for all rows)
[880]2132
[1846]2133
2134        Example::
2135
2136            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2137
[880]2138        """
2139        if insitu is None: insitu = rcParams['insitu']
2140        varlist = vars()
2141        from asap.asaplinefind import linefinder
2142        from asap import _is_sequence_or_number as _is_valid
2143
[976]2144        # check whether edge is set up for each IF individually
[1118]2145        individualedge = False;
2146        if len(edge) > 1:
2147            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2148                individualedge = True;
[907]2149
[1118]2150        if not _is_valid(edge, int) and not individualedge:
[909]2151            raise ValueError("Parameter 'edge' has to be an integer or a "
[907]2152                             "pair of integers specified as a tuple. Nested "
2153                             "tuples are allowed to make individual selection "
                             "for different IFs.")
[919]2154
[1118]2155        curedge = (0, 0)
2156        if individualedge:
2157            for edgepar in edge:
2158                if not _is_valid(edgepar, int):
2159                    raise ValueError("Each element of the 'edge' tuple has "
2160                                     "to be a pair of integers or an integer.")
[907]2161        else:
[1118]2162            curedge = edge;
[880]2163
[1907]2164        if not insitu:
2165            workscan = self.copy()
2166        else:
2167            workscan = self
2168
[880]2169        # setup fitter
2170        f = fitter()
[1907]2171        f.set_function(lpoly=order)
[880]2172
2173        # setup line finder
[1118]2174        fl = linefinder()
[1268]2175        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2176
[907]2177        fl.set_scan(workscan)
2178
[1907]2179        if mask is None:
2180            mask = _n_bools(workscan.nchan(), True)
2181       
2182        if rows is None:
2183            rows = xrange(workscan.nrow())
2184        elif isinstance(rows, int):
2185            rows = [ rows ]
2186       
[1819]2187        # Save parameters of baseline fits & masklists as a class attribute.
2188        # NOTICE: It does not reflect changes in scantable!
2189        if len(rows) > 0:
2190            self.blpars=[]
2191            self.masklists=[]
[1907]2192            self.actualmask=[]
[880]2193        asaplog.push("Processing:")
2194        for r in rows:
[1118]2195            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2196                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2197                 workscan.getpol(r), workscan.getcycle(r))
[880]2198            asaplog.push(msg, False)
[907]2199
[976]2200            # figure out edge parameter
[1118]2201            if individualedge:
2202                if len(edge) <= workscan.getif(r):
2203                    raise RuntimeError("Number of edge elements appears to "
2204                                       "be less than the number of IFs")
2205                curedge = edge[workscan.getif(r)]
[919]2206
[1907]2207            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]2208
[976]2209            # setup line finder
[1819]2210            fl.find_lines(r, actualmask, curedge)
[1907]2211           
[1819]2212            f.x = workscan._getabcissa(r)
2213            f.y = workscan._getspectrum(r)
[1907]2214            f.mask = fl.get_mask()
[1819]2215            f.data = None
[880]2216            f.fit()
[1819]2217
2218            # Show mask list
[1931]2219            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2220            msg = "mask range: "+str(masklist)
2221            asaplog.push(msg, False)
2222
[1061]2223            if plot:
2224                f.plot(residual=True)
2225                x = raw_input("Accept fit ( [y]/n ): ")
2226                if x.upper() == 'N':
[1819]2227                    self.blpars.append(None)
2228                    self.masklists.append(None)
[1907]2229                    self.actualmask.append(None)
[1061]2230                    continue
[1819]2231
[880]2232            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2233            self.blpars.append(f.get_parameters())
2234            self.masklists.append(masklist)
[1907]2235            self.actualmask.append(f.mask)
[1061]2236        if plot:
2237            f._p.unmap()
2238            f._p = None
2239        workscan._add_history("auto_poly_baseline", varlist)
[880]2240        if insitu:
2241            self._assign(workscan)
2242        else:
2243            return workscan
2244
[1862]2245    @asaplog_post_dec
[914]2246    def rotate_linpolphase(self, angle):
[1846]2247        """\
[914]2248        Rotate the phase of the complex polarization O=Q+iU correlation.
2249        This is always done in situ in the raw data.  So if you call this
2250        function more than once then each call rotates the phase further.
[1846]2251
[914]2252        Parameters:
[1846]2253
[914]2254            angle:   The angle (degrees) to rotate (add) by.
[1846]2255
2256        Example::
2257
[914]2258            scan.rotate_linpolphase(2.3)
[1846]2259
[914]2260        """
2261        varlist = vars()
[936]2262        self._math._rotate_linpolphase(self, angle)
[914]2263        self._add_history("rotate_linpolphase", varlist)
2264        return
[710]2265
[1862]2266    @asaplog_post_dec
[914]2267    def rotate_xyphase(self, angle):
[1846]2268        """\
[914]2269        Rotate the phase of the XY correlation.  This is always done in situ
2270        in the data.  So if you call this function more than once
2271        then each call rotates the phase further.
[1846]2272
[914]2273        Parameters:
[1846]2274
[914]2275            angle:   The angle (degrees) to rotate (add) by.
[1846]2276
2277        Example::
2278
[914]2279            scan.rotate_xyphase(2.3)
[1846]2280
[914]2281        """
2282        varlist = vars()
[936]2283        self._math._rotate_xyphase(self, angle)
[914]2284        self._add_history("rotate_xyphase", varlist)
2285        return
2286
[1862]2287    @asaplog_post_dec
[914]2288    def swap_linears(self):
[1846]2289        """\
[1573]2290        Swap the linear polarisations XX and YY, or more generally the first
[1348]2291        two polarisations, as this also works for circulars.
[914]2292        """
2293        varlist = vars()
[936]2294        self._math._swap_linears(self)
[914]2295        self._add_history("swap_linears", varlist)
2296        return
2297
[1862]2298    @asaplog_post_dec
[914]2299    def invert_phase(self):
[1846]2300        """\
[914]2301        Invert the phase of the complex polarisation
2302        """
2303        varlist = vars()
[936]2304        self._math._invert_phase(self)
[914]2305        self._add_history("invert_phase", varlist)
2306        return
2307
[1862]2308    @asaplog_post_dec
[876]2309    def add(self, offset, insitu=None):
[1846]2310        """\
[513]2311        Return a scan where all spectra have the offset added
[1846]2312
[513]2313        Parameters:
[1846]2314
[513]2315            offset:      the offset
[1855]2316
[513]2317            insitu:      if False a new scantable is returned.
2318                         Otherwise, the scaling is done in-situ
2319                         The default is taken from .asaprc (False)
[1846]2320
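        Example (the offset value is illustrative)::

            # add a constant offset of 2.5 to all spectra
            shifted = scan.add(2.5, insitu=False)
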
[513]2321        """
2322        if insitu is None: insitu = rcParams['insitu']
[876]2323        self._math._setinsitu(insitu)
[513]2324        varlist = vars()
[876]2325        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2326        s._add_history("add", varlist)
[876]2327        if insitu:
2328            self._assign(s)
2329        else:
[513]2330            return s
2331
[1862]2332    @asaplog_post_dec
[1308]2333    def scale(self, factor, tsys=True, insitu=None):
[1846]2334        """\
2335
[1938]2336        Return a scan where all spectra are scaled by the given 'factor'
[1846]2337
[513]2338        Parameters:
[1846]2339
[1819]2340            factor:      the scaling factor (float or 1D float list)
[1855]2341
[513]2342            insitu:      if False a new scantable is returned.
2343                         Otherwise, the scaling is done in-situ
2344                         The default is taken from .asaprc (False)
[1855]2345
[513]2346            tsys:        if True (default) then apply the operation to Tsys
2347                         as well as the data
[1846]2348
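        Example (the scaling factor is illustrative)::

            # scale the spectra (and Tsys) by 1.05
            scaled = scan.scale(1.05, insitu=False)
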
[513]2349        """
2350        if insitu is None: insitu = rcParams['insitu']
[876]2351        self._math._setinsitu(insitu)
[513]2352        varlist = vars()
[1819]2353        s = None
2354        import numpy
2355        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2356            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2357                from asapmath import _array2dOp
2358                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2359            else:
2360                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2361        else:
2362            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2363        s._add_history("scale", varlist)
[876]2364        if insitu:
2365            self._assign(s)
2366        else:
[513]2367            return s
2368
[1504]2369    def set_sourcetype(self, match, matchtype="pattern",
2370                       sourcetype="reference"):
[1846]2371        """\
[1502]2372        Set the type of the source to be an source or reference scan
[1846]2373        using the provided pattern.
2374
[1502]2375        Parameters:
[1846]2376
[1504]2377            match:          a Unix style pattern, regular expression or selector
[1855]2378
[1504]2379            matchtype:      'pattern' (default) UNIX style pattern or
2380                            'regex' regular expression
[1855]2381
[1502]2382            sourcetype:     the type of the source to use (source/reference)
[1846]2383
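        Example (the source name pattern is illustrative)::

            # mark all scans whose names end in '_R' as reference scans
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")
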
[1502]2384        """
2385        varlist = vars()
2386        basesel = self.get_selection()
2387        stype = -1
2388        if sourcetype.lower().startswith("r"):
2389            stype = 1
2390        elif sourcetype.lower().startswith("s"):
2391            stype = 0
[1504]2392        else:
[1502]2393            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2394        if matchtype.lower().startswith("p"):
2395            matchtype = "pattern"
2396        elif matchtype.lower().startswith("r"):
2397            matchtype = "regex"
2398        else:
2399            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2400        sel = selector()
2401        if isinstance(match, selector):
2402            sel = match
2403        else:
[1504]2404            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2405        self.set_selection(basesel+sel)
2406        self._setsourcetype(stype)
2407        self.set_selection(basesel)
[1573]2408        self._add_history("set_sourcetype", varlist)
[1502]2409
[1862]2410    @asaplog_post_dec
[1857]2411    @preserve_selection
[1819]2412    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2413        """\
[670]2414        This function allows you to build quotients automatically.
[1819]2415        It assumes the observation has the same number of
[670]2416        "ons" and "offs".
[1846]2417
[670]2418        Parameters:
[1846]2419
[710]2420            preserve:       you can preserve (default) the continuum or
2421                            remove it.  The equations used are
[1857]2422
[670]2423                            preserve: Output = Toff * (on/off) - Toff
[1857]2424
[1070]2425                            remove:   Output = Toff * (on/off) - Ton
[1855]2426
[1573]2427            mode:           the on/off detection mode
[1348]2428                            'paired' (default)
2429                            identifies 'off' scans by the
2430                            trailing '_R' (Mopra/Parkes) or
2431                            '_e'/'_w' (Tid) and matches
2432                            on/off pairs from the observing pattern
[1502]2433                            'time'
2434                            finds the closest off in time
[1348]2435
[1857]2436        .. todo:: verify argument is not implemented
2437
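        Example::

            # build the quotients, removing the continuum
            q = scan.auto_quotient(preserve=False)
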
[670]2438        """
[1857]2439        varlist = vars()
[1348]2440        modes = ["time", "paired"]
[670]2441        if not mode in modes:
[876]2442            msg = "please provide valid mode. Valid modes are %s" % (modes)
2443            raise ValueError(msg)
[1348]2444        s = None
2445        if mode.lower() == "paired":
[1857]2446            sel = self.get_selection()
[1875]2447            sel.set_query("SRCTYPE==psoff")
[1356]2448            self.set_selection(sel)
[1348]2449            offs = self.copy()
[1875]2450            sel.set_query("SRCTYPE==pson")
[1356]2451            self.set_selection(sel)
[1348]2452            ons = self.copy()
2453            s = scantable(self._math._quotient(ons, offs, preserve))
2454        elif mode.lower() == "time":
2455            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2456        s._add_history("auto_quotient", varlist)
[876]2457        return s
[710]2458
[1862]2459    @asaplog_post_dec
[1145]2460    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2461        """\
[1143]2462        Form a quotient using "off" beams when observing in "MX" mode.
[1846]2463
[1143]2464        Parameters:
[1846]2465
[1145]2466            mask:           an optional mask to be used when weight == 'stddev'
[1855]2467
[1143]2468            weight:         How to average the off beams.  Default is 'median'.
[1855]2469
[1145]2470            preserve:       you can preserve (default) the continuum or
[1855]2471                            remove it.  The equations used are:
[1846]2472
[1855]2473                                preserve: Output = Toff * (on/off) - Toff
2474
2475                                remove:   Output = Toff * (on/off) - Ton
2476
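        Example::

            # form the MX quotient with median-averaged off beams
            q = scan.mx_quotient(weight='median', preserve=True)
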
[1217]2477        """
[1593]2478        mask = mask or ()
[1141]2479        varlist = vars()
2480        on = scantable(self._math._mx_extract(self, 'on'))
[1143]2481        preoff = scantable(self._math._mx_extract(self, 'off'))
2482        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2483        from asapmath  import quotient
[1145]2484        q = quotient(on, off, preserve)
[1143]2485        q._add_history("mx_quotient", varlist)
[1217]2486        return q
[513]2487
[1862]2488    @asaplog_post_dec
[718]2489    def freq_switch(self, insitu=None):
[1846]2490        """\
[718]2491        Apply frequency switching to the data.
[1846]2492
[718]2493        Parameters:
[1846]2494
[718]2495            insitu:      if False a new scantable is returned.
2496                         Otherwise, the switching is done in-situ
2497                         The default is taken from .asaprc (False)
[1846]2498
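        Example::

            # apply frequency switching and return a new scantable
            fs = scan.freq_switch(insitu=False)
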
[718]2499        """
2500        if insitu is None: insitu = rcParams['insitu']
[876]2501        self._math._setinsitu(insitu)
[718]2502        varlist = vars()
[876]2503        s = scantable(self._math._freqswitch(self))
[1118]2504        s._add_history("freq_switch", varlist)
[1856]2505        if insitu:
2506            self._assign(s)
2507        else:
2508            return s
[718]2509
[1862]2510    @asaplog_post_dec
[780]2511    def recalc_azel(self):
[1846]2512        """Recalculate the azimuth and elevation for each position."""
[780]2513        varlist = vars()
[876]2514        self._recalcazel()
[780]2515        self._add_history("recalc_azel", varlist)
2516        return
2517
[1862]2518    @asaplog_post_dec
[513]2519    def __add__(self, other):
2520        varlist = vars()
2521        s = None
2522        if isinstance(other, scantable):
[1573]2523            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2524        elif isinstance(other, float):
[876]2525            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2526        else:
[718]2527            raise TypeError("Other input is not a scantable or float value")
[513]2528        s._add_history("operator +", varlist)
2529        return s
2530
[1862]2531    @asaplog_post_dec
[513]2532    def __sub__(self, other):
2533        """
2534        implicit on all axes and on Tsys
2535        """
2536        varlist = vars()
2537        s = None
2538        if isinstance(other, scantable):
[1588]2539            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2540        elif isinstance(other, float):
[876]2541            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2542        else:
[718]2543            raise TypeError("Other input is not a scantable or float value")
[513]2544        s._add_history("operator -", varlist)
2545        return s
[710]2546
[1862]2547    @asaplog_post_dec
[513]2548    def __mul__(self, other):
2549        """
2550        implicit on all axes and on Tsys
2551        """
2552        varlist = vars()
2553        s = None
2554        if isinstance(other, scantable):
[1588]2555            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2556        elif isinstance(other, float):
[876]2557            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2558        else:
[718]2559            raise TypeError("Other input is not a scantable or float value")
[513]2560        s._add_history("operator *", varlist)
2561        return s
2562
[710]2563
[1862]2564    @asaplog_post_dec
[513]2565    def __div__(self, other):
2566        """
2567        implicit on all axes and on Tsys
2568        """
2569        varlist = vars()
2570        s = None
2571        if isinstance(other, scantable):
[1589]2572            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2573        elif isinstance(other, float):
2574            if other == 0.0:
[718]2575                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2576            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2577        else:
[718]2578            raise TypeError("Other input is not a scantable or float value")
[513]2579        s._add_history("operator /", varlist)
2580        return s
2581
[1862]2582    @asaplog_post_dec
[530]2583    def get_fit(self, row=0):
[1846]2584        """\
[530]2585        Print or return the stored fits for a row in the scantable
[1846]2586
[530]2587        Parameters:
[1846]2588
[530]2589            row:    the row which the fit has been applied to.
[1846]2590
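        Example::

            # print and retrieve the fit stored for row 0
            fitdict = scan.get_fit(0)
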
[530]2591        """
2592        if row > self.nrow():
2593            return
[976]2594        from asap.asapfit import asapfit
[530]2595        fit = asapfit(self._getfit(row))
[1859]2596        asaplog.push( '%s' %(fit) )
2597        return fit.as_dict()
[530]2598
[1483]2599    def flag_nans(self):
[1846]2600        """\
[1483]2601        Utility function to flag NaN values in the scantable.
2602        """
2603        import numpy
2604        basesel = self.get_selection()
2605        for i in range(self.nrow()):
[1589]2606            sel = self.get_row_selector(i)
2607            self.set_selection(basesel+sel)
[1483]2608            nans = numpy.isnan(self._getspectrum(0))
2609            if numpy.any(nans):
2610                bnans = [ bool(v) for v in nans]
2611                self.flag(bnans)
2612        self.set_selection(basesel)
2613
[1588]2614    def get_row_selector(self, rowno):
[1992]2615        #return selector(beams=self.getbeam(rowno),
2616        #                ifs=self.getif(rowno),
2617        #                pols=self.getpol(rowno),
2618        #                scans=self.getscan(rowno),
2619        #                cycles=self.getcycle(rowno))
2620        return selector(rows=[rowno])
[1573]2621
[484]2622    def _add_history(self, funcname, parameters):
[1435]2623        if not rcParams['scantable.history']:
2624            return
[484]2625        # create date
2626        sep = "##"
2627        from datetime import datetime
2628        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2629        hist = dstr+sep
2630        hist += funcname+sep#cdate+sep
2631        if parameters.has_key('self'): del parameters['self']
[1118]2632        for k, v in parameters.iteritems():
[484]2633            if type(v) is dict:
[1118]2634                for k2, v2 in v.iteritems():
[484]2635                    hist += k2
2636                    hist += "="
[1118]2637                    if isinstance(v2, scantable):
[484]2638                        hist += 'scantable'
2639                    elif k2 == 'mask':
[1118]2640                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]2641                            hist += str(self._zip_mask(v2))
2642                        else:
2643                            hist += str(v2)
[484]2644                    else:
[513]2645                        hist += str(v2)
[484]2646            else:
2647                hist += k
2648                hist += "="
[1118]2649                if isinstance(v, scantable):
[484]2650                    hist += 'scantable'
2651                elif k == 'mask':
[1118]2652                    if isinstance(v, list) or isinstance(v, tuple):
[513]2653                        hist += str(self._zip_mask(v))
2654                    else:
2655                        hist += str(v)
[484]2656                else:
2657                    hist += str(v)
2658            hist += sep
2659        hist = hist[:-2] # remove trailing '##'
2660        self._addhistory(hist)
2661
[710]2662
[484]2663    def _zip_mask(self, mask):
2664        mask = list(mask)
2665        i = 0
2666        segments = []
2667        while mask[i:].count(1):
2668            i += mask[i:].index(1)
2669            if mask[i:].count(0):
2670                j = i + mask[i:].index(0)
2671            else:
[710]2672                j = len(mask)
[1118]2673            segments.append([i, j])
[710]2674            i = j
[484]2675        return segments
[714]2676
[626]2677    def _get_ordinate_label(self):
2678        fu = "("+self.get_fluxunit()+")"
2679        import re
2680        lbl = "Intensity"
[1118]2681        if re.match(".K.", fu):
[626]2682            lbl = "Brightness Temperature "+ fu
[1118]2683        elif re.match(".Jy.", fu):
[626]2684            lbl = "Flux density "+ fu
2685        return lbl
[710]2686
[876]2687    def _check_ifs(self):
[1986]2688        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
2689        #nchans = filter(lambda t: t > 0, nchans)
2690        nchans = [self.nchan(i) for i in self.getifnos()]
[876]2691        return (sum(nchans)/len(nchans) == nchans[0])
[976]2692
[1862]2693    @asaplog_post_dec
[1916]2694    #def _fill(self, names, unit, average, getpt, antenna):
2695    def _fill(self, names, unit, average, opts={}):
[976]2696        first = True
2697        fullnames = []
2698        for name in names:
2699            name = os.path.expandvars(name)
2700            name = os.path.expanduser(name)
2701            if not os.path.exists(name):
2702                msg = "File '%s' does not exist" % (name)
2703                raise IOError(msg)
2704            fullnames.append(name)
2705        if average:
2706            asaplog.push('Auto averaging integrations')
[1079]2707        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2708        for name in fullnames:
[1073]2709            tbl = Scantable(stype)
[1843]2710            r = filler(tbl)
[1504]2711            rx = rcParams['scantable.reference']
[1843]2712            r.setreferenceexpr(rx)
[976]2713            msg = "Importing %s..." % (name)
[1118]2714            asaplog.push(msg, False)
[1916]2715            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2716            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2717            r.fill()
[976]2718            if average:
[1118]2719                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2720            if not first:
2721                tbl = self._math._merge([self, tbl])
2722            Scantable.__init__(self, tbl)
[1843]2723            r.close()
[1118]2724            del r, tbl
[976]2725            first = False
[1861]2726            #flush log
2727        asaplog.post()
[976]2728        if unit is not None:
2729            self.set_fluxunit(unit)
[1824]2730        if not is_casapy():
2731            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2732
[1402]2733    def __getitem__(self, key):
2734        if key < 0:
2735            key += self.nrow()
2736        if key >= self.nrow():
2737            raise IndexError("Row index out of range.")
2738        return self._getspectrum(key)
2739
2740    def __setitem__(self, key, value):
2741        if key < 0:
2742            key += self.nrow()
2743        if key >= self.nrow():
2744            raise IndexError("Row index out of range.")
2745        if not hasattr(value, "__len__") or \
2746                len(value) > self.nchan(self.getif(key)):
2747            raise ValueError("Spectrum length doesn't match.")
2748        return self._setspectrum(value, key)
2749
2750    def __len__(self):
2751        return self.nrow()
2752
2753    def __iter__(self):
2754        for i in range(len(self)):
2755            yield self[i]