source: trunk/python/scantable.py @ 2282

Last change on this file since 2282 was 2277, checked in by Malte Marquarding, 13 years ago

revert accidental changes

[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[2004]12from asap._asap import filler, msfiller
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
22def preserve_selection(func):
[1691]23    @wraps_dec(func)
[1689]24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
[1857]26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
[1689]30        return val
31    return wrap
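# A minimal usage sketch for preserve_selection (illustrative; 'average_all'
# is a hypothetical function name). The decorator records the selection that
# is active before the call and restores it afterwards, even on error:
#
#     @preserve_selection
#     def average_all(scan):
#         scan.set_selection(selector())   # temporarily clear the selection
#         return scan.average_time()
#     # the caller's original selection is re-applied when this returns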
32
[1846]33def is_scantable(filename):
34    """Is the given file a scantable?
[1689]35
[1846]36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
[1883]41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
[1697]57
[1883]58def is_ms(filename):
59    """Is the given file a MeasurementSet?
[1697]60
[1883]61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
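# Illustrative use of the two helpers above (a hedged sketch; the path is
# hypothetical):
#
#     name = '/data/obs_table'
#     if is_scantable(name):
#         s = scantable(name)
#     elif is_ms(name):
#         s = scantable(name)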
[2186]78
79def normalise_edge_param(edge):
80    """\
81    Convert a given edge value to a one-dimensional array that can be
82    given to baseline-fitting/subtraction functions.
83    The length of the output is always even, because a value for each
84    side of the spectra is contained for each IF. When the length is 2,
85    the values are applied to all IFs. If the length is larger than 2,
86    it will be 2*ifnos().
87    Accepted formats of edge include:
88            * an integer - will be used for both sides of spectra of all IFs.
89                  e.g. 10 is converted to [10,10]
[2277]90            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
[2186]91            * a list/tuple containing an integer - same as the above case.
92                  e.g. [10] is converted to [10,10]
93            * a list/tuple containing two integers - will be used for all IFs.
94                  e.g. [5,10] is output as it is. no need to convert.
95            * a list/tuple of lists/tuples containing TWO integers -
96                  each element of edge will be used for each IF.
[2277]97                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
98                 
99                  If an element contains the same integer values, the input 'edge'
100                  parameter can be given in a simpler shape in the following cases:
[2186]101                      ** when len(edge)!=2
[2277]102                          any element containing identical values can be replaced
103                          by a single integer.
104                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
105                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
[2186]106                      ** when len(edge)=2
107                          care is needed for this case: ONLY ONE of the
108                          elements can be a single integer,
109                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
[2277]110                               or [[5,5],10], but can NOT be simplified to [5,10].
[2186]111                               when [5,10] given, it is interpreted as
[2277]112                               [[5,10],[5,10],[5,10],...] instead, as shown before.
[2186]113    """
114    from asap import _is_sequence_or_number as _is_valid
115    if isinstance(edge, list) or isinstance(edge, tuple):
116        for edgepar in edge:
117            if not _is_valid(edgepar, int):
118                raise ValueError, "Each element of the 'edge' tuple has \
119                                   to be a pair of integers or an integer."
120            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
121                if len(edgepar) != 2:
122                    raise ValueError, "Each element of the 'edge' tuple has \
123                                       to be a pair of integers or an integer."
124    else:
125        if not _is_valid(edge, int):
126            raise ValueError, "Parameter 'edge' has to be an integer or a \
127                               pair of integers specified as a tuple. \
128                               Nested tuples are allowed \
129                               to make individual selection for different IFs."
130       
131
132    if isinstance(edge, int):
133        edge = [ edge, edge ]                 # e.g. 3   => [3,3]
134    elif isinstance(edge, list) or isinstance(edge, tuple):
135        if len(edge) == 0:
136            edge = [0, 0]                     # e.g. []  => [0,0]
137        elif len(edge) == 1:
138            if isinstance(edge[0], int):
139                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]
140
141    commonedge = True
142    if len(edge) > 2: commonedge = False
143    else:
144        for edgepar in edge:
145            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
146                commonedge = False
147                break
148
149    if commonedge:
150        if len(edge) > 1:
151            norm_edge = edge
152        else:
153            norm_edge = edge + edge
154    else:
155        norm_edge = []
156        for edgepar in edge:
157            if isinstance(edgepar, int):
158                norm_edge += [edgepar, edgepar]
159            else:
160                norm_edge += edgepar
161
162    return norm_edge
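# Conversion examples for normalise_edge_param, following the docstring above
# (an illustrative sketch):
#
#     normalise_edge_param(10)                  ->  [10, 10]
#     normalise_edge_param([])                  ->  [0, 0]
#     normalise_edge_param([5, 10])             ->  [5, 10]
#     normalise_edge_param([[5, 5], [10, 10]])  ->  [5, 5, 10, 10]
#     normalise_edge_param([1, 2, 3])           ->  [1, 1, 2, 2, 3, 3]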
163
164def raise_fitting_failure_exception(e):
165    msg = "The fit failed, possibly because it didn't converge."
166    if rcParams["verbose"]:
167        asaplog.push(str(e))
168        asaplog.push(str(msg))
169    else:
170        raise RuntimeError(str(e)+'\n'+msg)
171
[2189]172def pack_progress_params(showprogress, minnrow):
173    return str(showprogress).lower() + ',' + str(minnrow)
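# e.g. pack_progress_params(True, 1000) returns 'true,1000' (illustrative)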
174
[876]175class scantable(Scantable):
[1846]176    """\
177        The ASAP container for scans (single-dish data).
[102]178    """
[1819]179
[1862]180    @asaplog_post_dec
[1916]181    #def __init__(self, filename, average=None, unit=None, getpt=None,
182    #             antenna=None, parallactify=None):
[2277]183    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]184        """\
[102]185        Create a scantable from a saved one or make a reference
[1846]186
[102]187        Parameters:
[1846]188
189            filename:     the name of an asap table on disk
190                          or
191                          the name of a rpfits/sdfits/ms file
192                          (integrations within scans are auto averaged
193                          and the whole file is read) or
194                          [advanced] a reference to an existing scantable
195
196            average:      average all integrations within a scan on read.
197                          The default (True) is taken from .asaprc.
198
[484]199            unit:         brightness unit; must be consistent with K or Jy.
[1846]200                          Over-rides the default selected by the filler
201                          (input rpfits/sdfits/ms) or replaces the value
202                          in existing scantables
203
204            getpt:        for MeasurementSet input data only:
205                          If True, all pointing data are filled.
206                          The default is False, which can make loading
207                          the MS data faster in some cases.
208
[1920]209            antenna:      for MeasurementSet input data only:
[2277]210                          Antenna selection. integer (id) or string (name or id).
[1846]211
212            parallactify: Indicate that the data had been parallactified. Default
213                          is taken from rc file.
214
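        Example (an illustrative sketch; the file names are hypothetical)::

            s = scantable('mydata.asap')                # open an asap table
            s = scantable('mydata.rpf', average=True)   # fill from rpfits
            s = scantable('mydata.ms', getpt=True)      # fill from an MS
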
[710]215        """
[976]216        if average is None:
[710]217            average = rcParams['scantable.autoaverage']
[1916]218        #if getpt is None:
219        #    getpt = True
220        #if antenna is not None:
221        #    asaplog.push("Antenna selection currently unsupported."
222        #                 "Using ''")
223        #    asaplog.post('WARN')
224        #if antenna is None:
225        #    antenna = ''
226        #elif type(antenna) == int:
227        #    antenna = '%s' % antenna
228        #elif type(antenna) == list:
229        #    tmpstr = ''
230        #    for i in range( len(antenna) ):
231        #        if type(antenna[i]) == int:
232        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
233        #        elif type(antenna[i]) == str:
234        #            tmpstr=tmpstr+antenna[i]+','
235        #        else:
236        #            raise TypeError('Bad antenna selection.')
237        #    antenna = tmpstr.rstrip(',')
[1593]238        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]239        varlist = vars()
[876]240        from asap._asap import stmath
[1819]241        self._math = stmath( rcParams['insitu'] )
[876]242        if isinstance(filename, Scantable):
243            Scantable.__init__(self, filename)
[181]244        else:
[1697]245            if isinstance(filename, str):
[976]246                filename = os.path.expandvars(filename)
247                filename = os.path.expanduser(filename)
248                if not os.path.exists(filename):
249                    s = "File '%s' not found." % (filename)
250                    raise IOError(s)
[1697]251                if is_scantable(filename):
252                    ondisk = rcParams['scantable.storage'] == 'disk'
253                    Scantable.__init__(self, filename, ondisk)
254                    if unit is not None:
255                        self.set_fluxunit(unit)
[2008]256                    if average:
257                        self._assign( self.average_time( scanav=True ) )
[1819]258                    # do not reset to the default freqframe
259                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]260                #elif os.path.isdir(filename) \
261                #         and not os.path.exists(filename+'/table.f1'):
262                elif is_ms(filename):
[1916]263                    # Measurement Set
264                    opts={'ms': {}}
265                    mskeys=['getpt','antenna']
266                    for key in mskeys:
267                        if key in args.keys():
268                            opts['ms'][key] = args[key]
269                    #self._fill([filename], unit, average, getpt, antenna)
270                    self._fill([filename], unit, average, opts)
[1893]271                elif os.path.isfile(filename):
[1916]272                    #self._fill([filename], unit, average, getpt, antenna)
273                    self._fill([filename], unit, average)
[1883]274                else:
[1819]275                    msg = "The given file '%s' is not a valid " \
276                          "asap table." % (filename)
[1859]277                    raise IOError(msg)
[1118]278            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]279                  and isinstance(filename[-1], str):
[1916]280                #self._fill(filename, unit, average, getpt, antenna)
281                self._fill(filename, unit, average)
[1586]282        self.parallactify(parallactify)
[1259]283        self._add_history("scantable", varlist)
[102]284
[1862]285    @asaplog_post_dec
[876]286    def save(self, name=None, format=None, overwrite=False):
[1846]287        """\
[1280]288        Store the scantable on disk. This can be an asap (aips++) Table,
289        SDFITS or MS2 format.
[1846]290
[116]291        Parameters:
[1846]292
[1093]293            name:        the name of the outputfile. For format "ASCII"
294                         this is the root file name (data in 'name'.txt
[497]295                         and header in 'name'_header.txt)
[1855]296
[116]297            format:      an optional file format. Default is ASAP.
[1855]298                         Allowed are:
299
300                            * 'ASAP' (save as ASAP [aips++] Table),
301                            * 'SDFITS' (save as SDFITS file)
302                            * 'ASCII' (saves as ascii text file)
303                            * 'MS2' (saves as a casacore MeasurementSet V2)
[2277]304                            * 'FITS' (save as image FITS - not readable by class)
[1855]305                            * 'CLASS' (save as FITS readable by CLASS)
306
[411]307            overwrite:   Whether the file should be overwritten if it exists.
[256]308                         With the default (False) the method returns with a
[411]309                         warning without writing the output. USE WITH CARE.
[1855]310
[1846]311        Example::
312
[116]313            scan.save('myscan.asap')
[1118]314            scan.save('myscan.sdfits', 'SDFITS')
[1846]315
[116]316        """
[411]317        from os import path
[1593]318        format = format or rcParams['scantable.save']
[256]319        suffix = '.'+format.lower()
[1118]320        if name is None or name == "":
[256]321            name = 'scantable'+suffix
[718]322            msg = "No filename given. Using default name %s..." % name
323            asaplog.push(msg)
[411]324        name = path.expandvars(name)
[256]325        if path.isfile(name) or path.isdir(name):
326            if not overwrite:
[718]327                msg = "File %s exists." % name
[1859]328                raise IOError(msg)
[451]329        format2 = format.upper()
330        if format2 == 'ASAP':
[116]331            self._save(name)
[2029]332        elif format2 == 'MS2':
333            msopt = {'ms': {'overwrite': overwrite } }
334            from asap._asap import mswriter
335            writer = mswriter( self )
336            writer.write( name, msopt )
[116]337        else:
[989]338            from asap._asap import stwriter as stw
[1118]339            writer = stw(format2)
340            writer.write(self, name)
[116]341        return
342
[102]343    def copy(self):
[1846]344        """Return a copy of this scantable.
345
346        *Note*:
347
[1348]348            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]349
350        Example::
351
[102]352            copiedscan = scan.copy()
[1846]353
[102]354        """
[876]355        sd = scantable(Scantable._copy(self))
[113]356        return sd
357
[1093]358    def drop_scan(self, scanid=None):
[1846]359        """\
[1093]360        Return a new scantable where the specified scan number(s) has(have)
361        been dropped.
[1846]362
[1093]363        Parameters:
[1846]364
[1093]365            scanid:    a (list of) scan number(s)
[1846]366
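        Example (an illustrative sketch; the scan numbers are arbitrary)::

            # remove scans 2 and 4, keeping everything else
            cleaned = scan.drop_scan([2, 4])
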
[1093]367        """
368        from asap import _is_sequence_or_number as _is_valid
369        from asap import _to_list
370        from asap import unique
371        if not _is_valid(scanid):
[2277]372            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
[1859]373        scanid = _to_list(scanid)
374        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
375        for sid in scanid: allscans.remove(sid)
376        if len(allscans) == 0:
377            raise ValueError("Can't remove all scans")
378        sel = selector(scans=allscans)
379        return self._select_copy(sel)
[1093]380
[1594]381    def _select_copy(self, selection):
382        orig = self.get_selection()
383        self.set_selection(orig+selection)
384        cp = self.copy()
385        self.set_selection(orig)
386        return cp
387
[102]388    def get_scan(self, scanid=None):
[1855]389        """\
[102]390        Return a specific scan (by scanno) or collection of scans (by
391        source name) in a new scantable.
[1846]392
393        *Note*:
394
[1348]395            See scantable.drop_scan() for the inverse operation.
[1846]396
[102]397        Parameters:
[1846]398
[513]399            scanid:    a (list of) scanno or a source name, unix-style
400                       patterns are accepted for source name matching, e.g.
401                       '*_R' gets all 'ref' scans
[1846]402
403        Example::
404
[513]405            # get all scans containing the source '323p459'
406            newscan = scan.get_scan('323p459')
407            # get all 'off' scans
408            refscans = scan.get_scan('*_R')
409            # get a subset of scans by scanno (as listed in scan.summary())
[1118]410            newscan = scan.get_scan([0, 2, 7, 10])
[1846]411
[102]412        """
413        if scanid is None:
[1859]414            raise RuntimeError( 'Please specify a scan no or name to '
415                                'retrieve from the scantable' )
[102]416        try:
[946]417            bsel = self.get_selection()
418            sel = selector()
[102]419            if type(scanid) is str:
[946]420                sel.set_name(scanid)
[1594]421                return self._select_copy(sel)
[102]422            elif type(scanid) is int:
[946]423                sel.set_scans([scanid])
[1594]424                return self._select_copy(sel)
[381]425            elif type(scanid) is list:
[946]426                sel.set_scans(scanid)
[1594]427                return self._select_copy(sel)
[381]428            else:
[718]429                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]430                raise TypeError(msg)
[102]431        except RuntimeError:
[1859]432            raise
[102]433
434    def __str__(self):
[2178]435        return Scantable._summary(self)
[102]436
[976]437    def summary(self, filename=None):
[1846]438        """\
[102]439        Print a summary of the contents of this scantable.
[1846]440
[102]441        Parameters:
[1846]442
[1931]443            filename:    the name of a file to write the output to
[102]444                         Default - no file output
[1846]445
[102]446        """
[2178]447        info = Scantable._summary(self)
[102]448        if filename is not None:
[256]449            if filename == "":
450                filename = 'scantable_summary.txt'
[415]451            from os.path import expandvars, isdir
[411]452            filename = expandvars(filename)
[415]453            if not isdir(filename):
[413]454                data = open(filename, 'w')
455                data.write(info)
456                data.close()
457            else:
[718]458                msg = "Illegal file name '%s'." % (filename)
[1859]459                raise IOError(msg)
460        return page(info)
[710]461
[1512]462    def get_spectrum(self, rowno):
[1471]463        """Return the spectrum for the current row in the scantable as a list.
[1846]464
[1471]465        Parameters:
[1846]466
[1573]467             rowno:   the row number to retrieve the spectrum from
[1846]468
[1471]469        """
470        return self._getspectrum(rowno)
[946]471
[1471]472    def get_mask(self, rowno):
473        """Return the mask for the current row in the scantable as a list.
[1846]474
[1471]475        Parameters:
[1846]476
[1573]477             rowno:   the row number to retrieve the mask from
[1846]478
[1471]479        """
480        return self._getmask(rowno)
481
482    def set_spectrum(self, spec, rowno):
[1938]483        """Set the spectrum for the current row in the scantable.
[1846]484
[1471]485        Parameters:
[1846]486
[1855]487             spec:   the new spectrum
[1846]488
[1855]489             rowno:  the row number to set the spectrum for
490
[1471]491        """
492        assert(len(spec) == self.nchan())
493        return self._setspectrum(spec, rowno)
494
[1600]495    def get_coordinate(self, rowno):
496        """Return the (spectral) coordinate for a a given 'rowno'.
[1846]497
498        *Note*:
499
[1600]500            * This coordinate is only valid until a scantable method modifies
501              the frequency axis.
502            * This coordinate contains the original frequency set-up,
503              NOT the new frame. The conversions, however, are done using the
504              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
505              use scantable.freq_align first. Without it there is no closure,
[1846]506              i.e.::
[1600]507
[1846]508                  c = myscan.get_coordinate(0)
509                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
510
[1600]511        Parameters:
[1846]512
[1600]513             rowno:    the row number for the spectral coordinate
514
515        """
516        return coordinate(Scantable.get_coordinate(self, rowno))
517
[946]518    def get_selection(self):
[1846]519        """\
[1005]520        Get the selection object currently set on this scantable.
[1846]521
522        Example::
523
[1005]524            sel = scan.get_selection()
525            sel.set_ifs(0)              # select IF 0
526            scan.set_selection(sel)     # apply modified selection
[1846]527
[946]528        """
529        return selector(self._getselection())
530
[1576]531    def set_selection(self, selection=None, **kw):
[1846]532        """\
[1005]533        Select a subset of the data. All following operations on this scantable
534        are only applied to this selection.
[1846]535
[1005]536        Parameters:
[1697]537
[1846]538            selection:    a selector object (default unset the selection), or
539                          any combination of "pols", "ifs", "beams", "scans",
540                          "cycles", "name", "query"
[1697]541
[1846]542        Examples::
[1697]543
[1005]544            sel = selector()         # create a selection object
[1118]545            sel.set_scans([0, 3])     # select SCANNO 0 and 3
[1005]546            scan.set_selection(sel)  # set the selection
547            scan.summary()           # will only print summary of scanno 0 and 3
548            scan.set_selection()     # unset the selection
[1697]549            # or the equivalent
550            scan.set_selection(scans=[0,3])
551            scan.summary()           # will only print summary of scanno 0 and 3
552            scan.set_selection()     # unset the selection
[1846]553
[946]554        """
[1576]555        if selection is None:
556            # reset
557            if len(kw) == 0:
558                selection = selector()
559            else:
560                # try keywords
561                for k in kw:
562                    if k not in selector.fields:
563                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
564                selection = selector(**kw)
[946]565        self._setselection(selection)
566
[1819]567    def get_row(self, row=0, insitu=None):
[1846]568        """\
[1819]569        Select a row in the scantable.
570        Return a scantable with single row.
[1846]571
[1819]572        Parameters:
[1846]573
574            row:    row no of integration, default is 0.
575            insitu: if False a new scantable is returned. Otherwise, the
576                    selection is done in-situ. The default is taken from .asaprc
577                    (False)
578
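        Example (an illustrative sketch; the row number is arbitrary)::

            # extract integration no. 5 as a new single-row scantable
            one = scan.get_row(5, insitu=False)
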
[1819]579        """
580        if insitu is None: insitu = rcParams['insitu']
581        if not insitu:
582            workscan = self.copy()
583        else:
584            workscan = self
585        # Select a row
586        sel=selector()
[1992]587        sel.set_rows([row])
588        #sel.set_scans([workscan.getscan(row)])
589        #sel.set_cycles([workscan.getcycle(row)])
590        #sel.set_beams([workscan.getbeam(row)])
591        #sel.set_ifs([workscan.getif(row)])
592        #sel.set_polarisations([workscan.getpol(row)])
593        #sel.set_name(workscan._getsourcename(row))
[1819]594        workscan.set_selection(sel)
595        if not workscan.nrow() == 1:
596            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
597            raise RuntimeError(msg)
598        del sel
599        if insitu:
600            self._assign(workscan)
601        else:
602            return workscan
603
[1862]604    @asaplog_post_dec
[1907]605    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]606        """\
[135]607        Determine the specified statistic of the current beam/if/pol
[102]608        Takes a 'mask' as an optional parameter to specify which
609        channels should be excluded.
[1846]610
[102]611        Parameters:
[1846]612
[1819]613            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
614                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]615
[135]616            mask:    an optional mask specifying where the statistic
[102]617                     should be determined.
[1855]618
[1819]619            form:    format string to print statistic values
[1846]620
[1907]621            row:     row number of spectrum to process.
622                     (default is None: for all rows)
[1846]623
[1907]624        Example:
[113]625            scan.set_unit('channel')
[1118]626            msk = scan.create_mask([100, 200], [500, 600])
[135]627            scan.stats(stat='mean', mask=msk)
[1846]628
[102]629        """
[1593]630        mask = mask or []
[876]631        if not self._check_ifs():
[1118]632            raise ValueError("Cannot apply mask as the IFs have different "
633                             "number of channels. Please use setselection() "
634                             "to select individual IFs")
[1819]635        rtnabc = False
636        if stat.lower().endswith('_abc'): rtnabc = True
637        getchan = False
638        if stat.lower().startswith('min') or stat.lower().startswith('max'):
639            chan = self._math._minmaxchan(self, mask, stat)
640            getchan = True
641            statvals = []
[1907]642        if not rtnabc:
643            if row == None:
644                statvals = self._math._stats(self, mask, stat)
645            else:
646                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]647
[1819]648        #def cb(i):
649        #    return statvals[i]
[256]650
[1819]651        #return self._row_callback(cb, stat)
[102]652
[1819]653        label=stat
654        #callback=cb
655        out = ""
656        #outvec = []
657        sep = '-'*50
[1907]658
659        if row == None:
660            rows = xrange(self.nrow())
661        elif isinstance(row, int):
662            rows = [ row ]
663
664        for i in rows:
[1819]665            refstr = ''
666            statunit= ''
667            if getchan:
668                qx, qy = self.chan2data(rowno=i, chan=chan[i])
669                if rtnabc:
670                    statvals.append(qx['value'])
671                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
672                    statunit= '['+qx['unit']+']'
673                else:
674                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
675
676            tm = self._gettime(i)
677            src = self._getsourcename(i)
678            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
679            out += 'Time[%s]:\n' % (tm)
[1907]680            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
681            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
682            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]683            #outvec.append(callback(i))
[1907]684            if len(rows) > 1:
685                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
686                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
687            else:
688                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
689                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]690            out +=  sep+"\n"
691
[1859]692        import os
693        if os.environ.has_key( 'USER' ):
694            usr = os.environ['USER']
695        else:
696            import commands
697            usr = commands.getoutput( 'whoami' )
698        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
699        f = open(tmpfile,'w')
700        print >> f, sep
701        print >> f, ' %s %s' % (label, statunit)
702        print >> f, sep
703        print >> f, out
704        f.close()
705        f = open(tmpfile,'r')
706        x = f.readlines()
707        f.close()
708        asaplog.push(''.join(x), False)
709
[1819]710        return statvals
711
712    def chan2data(self, rowno=0, chan=0):
[1846]713        """\
[1819]714        Returns channel/frequency/velocity and spectral value
715        at an arbitrary row and channel in the scantable.
[1846]716
[1819]717        Parameters:
[1846]718
[1819]719            rowno:   a row number in the scantable. Default is the
720                     first row, i.e. rowno=0
[1855]721
[1819]722            chan:    a channel in the scantable. Default is the first
723                     channel, i.e. pos=0
[1846]724
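        Example (an illustrative sketch)::

            # abscissa/value pair for channel 100 of the first row
            qx, qy = scan.chan2data(rowno=0, chan=100)
            # qx and qy are dicts with 'unit' and 'value' keys
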
[1819]725        """
726        if isinstance(rowno, int) and isinstance(chan, int):
727            qx = {'unit': self.get_unit(),
728                  'value': self._getabcissa(rowno)[chan]}
729            qy = {'unit': self.get_fluxunit(),
730                  'value': self._getspectrum(rowno)[chan]}
731            return qx, qy
732
[1118]733    def stddev(self, mask=None):
[1846]734        """\
[135]735        Determine the standard deviation of the current beam/if/pol
736        Takes a 'mask' as an optional parameter to specify which
737        channels should be excluded.
[1846]738
[135]739        Parameters:
[1846]740
[135]741            mask:    an optional mask specifying where the standard
742                     deviation should be determined.
743
[1846]744        Example::
745
[135]746            scan.set_unit('channel')
[1118]747            msk = scan.create_mask([100, 200], [500, 600])
[135]748            scan.stddev(mask=msk)
[1846]749
[135]750        """
[1118]751        return self.stats(stat='stddev', mask=mask);
[135]752
[1003]753
[1259]754    def get_column_names(self):
[1846]755        """\
[1003]756        Return a  list of column names, which can be used for selection.
757        """
[1259]758        return list(Scantable.get_column_names(self))
[1003]759
[1730]760    def get_tsys(self, row=-1):
[1846]761        """\
[113]762        Return the System temperatures.
[1846]763
764        Parameters:
765
766            row:    the rowno to get the information for. (default all rows)
767
[113]768        Returns:
[1846]769
[876]770            a list of Tsys values for the current selection
[1846]771
[113]772        """
[1730]773        if row > -1:
774            return self._get_column(self._gettsys, row)
[876]775        return self._row_callback(self._gettsys, "Tsys")
[256]776
[1730]777
778    def get_weather(self, row=-1):
[1846]779        """\
780        Return the weather information.
781
782        Parameters:
783
784            row:    the rowno to get the information for. (default all rows)
785
786        Returns:
787
788            a dict or a list of dicts of values for the current selection
789
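        Example (an illustrative sketch)::

            # ambient temperature of the first integration
            temp = scan.get_weather(row=0)['temperature']
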
790        """
791
[1730]792        values = self._get_column(self._get_weather, row)
793        if row > -1:
794            return {'temperature': values[0],
795                    'pressure': values[1], 'humidity' : values[2],
796                    'windspeed' : values[3], 'windaz' : values[4]
797                    }
798        else:
799            out = []
800            for r in values:
801
802                out.append({'temperature': r[0],
803                            'pressure': r[1], 'humidity' : r[2],
804                            'windspeed' : r[3], 'windaz' : r[4]
805                    })
806            return out
807
[876]808    def _row_callback(self, callback, label):
809        out = ""
[1118]810        outvec = []
[1590]811        sep = '-'*50
[876]812        for i in range(self.nrow()):
813            tm = self._gettime(i)
814            src = self._getsourcename(i)
[1590]815            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]816            out += 'Time[%s]:\n' % (tm)
[1590]817            if self.nbeam(-1) > 1:
818                out +=  ' Beam[%d] ' % (self.getbeam(i))
819            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
820            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]821            outvec.append(callback(i))
822            out += '= %3.3f\n' % (outvec[i])
[1590]823            out +=  sep+'\n'
[1859]824
825        asaplog.push(sep)
826        asaplog.push(" %s" % (label))
827        asaplog.push(sep)
828        asaplog.push(out)
[1861]829        asaplog.post()
[1175]830        return outvec
[256]831
[1947]832    def _get_column(self, callback, row=-1, *args):
[1070]833        """
834        """
835        if row == -1:
[1947]836            return [callback(i, *args) for i in range(self.nrow())]
[1070]837        else:
[1819]838            if  0 <= row < self.nrow():
[1947]839                return callback(row, *args)
[256]840
[1070]841
[1948]842    def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]843        """\
[113]844        Get a list of time stamps for the observations.
[1938]845        Return a datetime object or a string (default) for each
846        integration time stamp in the scantable.
[1846]847
[113]848        Parameters:
[1846]849
[1348]850            row:          row no of integration. Default -1 return all rows
[1855]851
[1348]852            asdatetime:   return values as datetime objects rather than strings
[1846]853
[2277]854            prec:         number of digits shown. Default -1 for automatic calculation.
[1948]855                          Note this number equals the digits of MVTime,
856                          i.e., 0<prec<3: dates with hh:: only,
857                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
[1947]858                          and >6 : with hh:mm:ss.tt... (prec-6 t's added)
859
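        Example (an illustrative sketch)::

            # time stamps of all rows as datetime objects
            times = scan.get_time(asdatetime=True)
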
[113]860        """
[1175]861        from datetime import datetime
[1948]862        if prec < 0:
863            # automagically set necessary precision +1
[1950]864            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]865            prec = max(6, int(prec))
866        else:
867            prec = max(0, prec)
868        if asdatetime:
869            #precision can be 1 millisecond at max
870            prec = min(12, prec)
871
[1947]872        times = self._get_column(self._gettime, row, prec)
[1348]873        if not asdatetime:
[1392]874            return times
[1947]875        format = "%Y/%m/%d/%H:%M:%S.%f"
876        if prec < 7:
877            nsub = 1 + (((6-prec)/2) % 3)
878            substr = [".%f","%S","%M"]
879            for i in range(nsub):
880                format = format.replace(substr[i],"")
[1175]881        if isinstance(times, list):
[1947]882            return [datetime.strptime(i, format) for i in times]
[1175]883        else:
[1947]884            return datetime.strptime(times, format)
[102]885
[1348]886
887    def get_inttime(self, row=-1):
[1846]888        """\
[1348]889        Get a list of integration times for the observations.
890        Return a time in seconds for each integration in the scantable.
[1846]891
[1348]892        Parameters:
[1846]893
[1348]894            row:    row no of integration. Default -1 return all rows.
[1846]895
[1348]896        """
[1573]897        return self._get_column(self._getinttime, row)
[1348]898
[1573]899
[714]900    def get_sourcename(self, row=-1):
[1846]901        """\
[794]902        Get a list of source names for the observations.
[714]903        Return a string for each integration in the scantable.
904        Parameters:
[1846]905
[1348]906            row:    row no of integration. Default -1 return all rows.
[1846]907
[714]908        """
[1070]909        return self._get_column(self._getsourcename, row)
[714]910
[794]911    def get_elevation(self, row=-1):
[1846]912        """\
[794]913        Get a list of elevations for the observations.
914        Return a float for each integration in the scantable.
[1846]915
[794]916        Parameters:
[1846]917
[1348]918            row:    row no of integration. Default -1 return all rows.
[1846]919
[794]920        """
[1070]921        return self._get_column(self._getelevation, row)
[794]922
923    def get_azimuth(self, row=-1):
[1846]924        """\
[794]925        Get a list of azimuths for the observations.
926        Return a float for each integration in the scantable.
[1846]927
[794]928        Parameters:
[1348]929            row:    row no of integration. Default -1 return all rows.
[1846]930
[794]931        """
[1070]932        return self._get_column(self._getazimuth, row)
[794]933
934    def get_parangle(self, row=-1):
[1846]935        """\
[794]936        Get a list of parallactic angles for the observations.
937        Return a float for each integration in the scantable.
[1846]938
[794]939        Parameters:
[1846]940
[1348]941            row:    row no of integration. Default -1 return all rows.
[1846]942
[794]943        """
[1070]944        return self._get_column(self._getparangle, row)
[794]945
[1070]946    def get_direction(self, row=-1):
947        """
948        Get a list of Positions on the sky (direction) for the observations.
[1594]949        Return a string for each integration in the scantable.
[1855]950
[1070]951        Parameters:
[1855]952
[1070]953            row:    row no of integration. Default -1 return all rows
[1855]954
[1070]955        """
956        return self._get_column(self._getdirection, row)
957
[1391]958    def get_directionval(self, row=-1):
[1846]959        """\
[1391]960        Get a list of Positions on the sky (direction) for the observations.
961        Return a float for each integration in the scantable.
[1846]962
[1391]963        Parameters:
[1846]964
[1391]965            row:    row no of integration. Default -1 return all rows
[1846]966
[1391]967        """
968        return self._get_column(self._getdirectionvec, row)
969
[1862]970    @asaplog_post_dec
[102]971    def set_unit(self, unit='channel'):
[1846]972        """\
[102]973        Set the unit for all following operations on this scantable
[1846]974
[102]975        Parameters:
[1846]976
977            unit:    optional unit, default is 'channel'. Use one of '*Hz',
978                     'km/s', 'channel' or equivalent ''
979
[102]980        """
[484]981        varlist = vars()
[1118]982        if unit in ['', 'pixel', 'channel']:
[113]983            unit = ''
984        inf = list(self._getcoordinfo())
985        inf[0] = unit
986        self._setcoordinfo(inf)
[1118]987        self._add_history("set_unit", varlist)
[113]988
[1862]989    @asaplog_post_dec
[484]990    def set_instrument(self, instr):
[1846]991        """\
[1348]992        Set the instrument for subsequent processing.
[1846]993
[358]994        Parameters:
[1846]995
[710]996            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]997                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]998
[358]999        """
1000        self._setInstrument(instr)
[1118]1001        self._add_history("set_instument", vars())
[358]1002
[1862]1003    @asaplog_post_dec
[1190]1004    def set_feedtype(self, feedtype):
[1846]1005        """\
[1190]1006        Overwrite the feed type, which might not be set correctly.
[1846]1007
[1190]1008        Parameters:
[1846]1009
[1190]1010            feedtype:     'linear' or 'circular'
[1846]1011
[1190]1012        """
1013        self._setfeedtype(feedtype)
1014        self._add_history("set_feedtype", vars())
1015
[1862]1016    @asaplog_post_dec
[276]1017    def set_doppler(self, doppler='RADIO'):
[1846]1018        """\
[276]1019        Set the doppler for all following operations on this scantable.
[1846]1020
[276]1021        Parameters:
[1846]1022
[276]1023            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1024
[276]1025        """
[484]1026        varlist = vars()
[276]1027        inf = list(self._getcoordinfo())
1028        inf[2] = doppler
1029        self._setcoordinfo(inf)
[1118]1030        self._add_history("set_doppler", vars())
[710]1031
[1862]1032    @asaplog_post_dec
[226]1033    def set_freqframe(self, frame=None):
[1846]1034        """\
[113]1035        Set the frame type of the Spectral Axis.
[1846]1036
[113]1037        Parameters:
[1846]1038
[591]1039            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]1040                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1041                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1042
1043        Example::
1044
[113]1045            scan.set_freqframe('BARY')
[1846]1046
[113]1047        """
[1593]1048        frame = frame or rcParams['scantable.freqframe']
[484]1049        varlist = vars()
[1819]1050        # "REST" is not implemented in casacore
1051        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1052        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
1053        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1054                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1055
[989]1056        if frame in valid:
[113]1057            inf = list(self._getcoordinfo())
1058            inf[1] = frame
1059            self._setcoordinfo(inf)
[1118]1060            self._add_history("set_freqframe", varlist)
[102]1061        else:
[1118]1062            msg  = "Please specify a valid freq type. Valid types are:\n", valid
[1859]1063            raise TypeError(msg)
[710]1064
[1862]1065    @asaplog_post_dec
[989]1066    def set_dirframe(self, frame=""):
[1846]1067        """\
[989]1068        Set the frame type of the Direction on the sky.
[1846]1069
[989]1070        Parameters:
[1846]1071
[989]1072            frame:   an optional frame type, default ''. Valid frames are:
1073                     'J2000', 'B1950', 'GALACTIC'
[1846]1074
1075        Example:
1076
[989]1077            scan.set_dirframe('GALACTIC')
[1846]1078
[989]1079        """
1080        varlist = vars()
[1859]1081        Scantable.set_dirframe(self, frame)
[1118]1082        self._add_history("set_dirframe", varlist)
[989]1083
[113]1084    def get_unit(self):
[1846]1085        """\
[113]1086        Get the default unit set in this scantable
[1846]1087
[113]1088        Returns:
[1846]1089
[113]1090            A unit string
[1846]1091
[113]1092        """
1093        inf = self._getcoordinfo()
1094        unit = inf[0]
1095        if unit == '': unit = 'channel'
1096        return unit
[102]1097
[1862]1098    @asaplog_post_dec
[158]1099    def get_abcissa(self, rowno=0):
[1846]1100        """\
[158]1101        Get the abcissa in the current coordinate setup for the currently
[113]1102        selected Beam/IF/Pol
[1846]1103
[113]1104        Parameters:
[1846]1105
[226]1106            rowno:    an optional row number in the scantable. Default is the
1107                      first row, i.e. rowno=0
[1846]1108
[113]1109        Returns:
[1846]1110
[1348]1111            The abcissa values and the format string (as a dictionary)
[1846]1112
[113]1113        """
[256]1114        abc = self._getabcissa(rowno)
[710]1115        lbl = self._getabcissalabel(rowno)
[158]1116        return abc, lbl
[113]1117
[1862]1118    @asaplog_post_dec
[1994]1119    def flag(self, row=-1, mask=None, unflag=False):
[1846]1120        """\
[1001]1121        Flag the selected data using an optional channel mask.
[1846]1122
[1001]1123        Parameters:
[1846]1124
[1994]1125            row:    an optional row number in the scantable.
1126                      Default -1 flags all rows
1127                     
[1001]1128            mask:   an optional channel mask, created with create_mask. Default
1129                    (no mask) is all channels.
[1855]1130
[1819]1131            unflag:    if True, unflag the data
[1846]1132
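        Example (an illustrative sketch)::

            # flag the channels covered by msk in all rows
            msk = scan.create_mask([0, 100])
            scan.flag(mask=msk)
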
[1001]1133        """
1134        varlist = vars()
[1593]1135        mask = mask or []
[1994]1136        self._flag(row, mask, unflag)
[1001]1137        self._add_history("flag", varlist)
1138
[1862]1139    @asaplog_post_dec
[1819]1140    def flag_row(self, rows=[], unflag=False):
[1846]1141        """\
[1819]1142        Flag the selected data in row-based manner.
[1846]1143
[1819]1144        Parameters:
[1846]1145
[1843]1146            rows:   list of row numbers to be flagged. Default is no row
[2277]1147                    (must be explicitly specified to execute row-based flagging).
[1855]1148
[1819]1149            unflag: if True, unflag the data.
[1846]1150
[1819]1151        """
1152        varlist = vars()
[1859]1153        self._flag_row(rows, unflag)
[1819]1154        self._add_history("flag_row", varlist)
1155
[1862]1156    @asaplog_post_dec
[1819]1157    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1158        """\
[1819]1159        Flag the selected data outside a specified range (in channel-based manner)
[1846]1160
[1819]1161        Parameters:
[1846]1162
[1819]1163            uthres:      upper threshold.
[1855]1164
[1819]1165            dthres:      lower threshold
[1846]1166
[2277]1167            clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1168                         False for flagging data inside the range.
[1855]1169
[1846]1170            unflag:      if True, unflag the data.
1171
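        Example (an illustrative sketch; the thresholds are arbitrary)::

            # flag everything outside the range [-0.5, 3.0]
            scan.clip(uthres=3.0, dthres=-0.5)
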
[1819]1172        """
1173        varlist = vars()
[1859]1174        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1175        self._add_history("clip", varlist)
1176
[1862]1177    @asaplog_post_dec
[1584]1178    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1179        """\
[1192]1180        Flag the data in 'lag' space by providing a frequency to remove.
[2177]1181        Flagged data in the scantable get interpolated over the region.
[1192]1182        No taper is applied.
[1846]1183
[1192]1184        Parameters:
[1846]1185
[1579]1186            start:    the start frequency (really a period within the
1187                      bandwidth)  or period to remove
[1855]1188
[1579]1189            end:      the end frequency or period to remove
[1855]1190
[1584]1191            unit:     the frequency unit (default "MHz") or "" for
[1579]1192                      explicit lag channels
[1846]1193
1194        *Notes*:
1195
[1579]1196            It is recommended to flag edges of the band or strong
[1348]1197            signals beforehand.
[1846]1198
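        Example (an illustrative sketch; the values are arbitrary)::

            # interpolate over a 1.2-1.3 MHz ripple
            cleaned = scan.lag_flag(1.2, 1.3, unit="MHz", insitu=False)
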
[1192]1199        """
1200        if insitu is None: insitu = rcParams['insitu']
1201        self._math._setinsitu(insitu)
1202        varlist = vars()
[1579]1203        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1204        if not (unit == "" or base.has_key(unit)):
[1192]1205            raise ValueError("%s is not a valid unit." % unit)
[1859]1206        if unit == "":
1207            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1208        else:
1209            s = scantable(self._math._lag_flag(self, start*base[unit],
1210                                               end*base[unit], "frequency"))
[1192]1211        s._add_history("lag_flag", varlist)
1212        if insitu:
1213            self._assign(s)
1214        else:
1215            return s
[1001]1216
[1862]1217    @asaplog_post_dec
[2186]1218    def fft(self, rowno=[], mask=[], getrealimag=False):
[2177]1219        """\
1220        Apply FFT to the spectra.
1221        Flagged data in the scantable get interpolated over the region.
1222
1223        Parameters:
[2186]1224
1225            rowno:          The row number(s) to be processed. int, list
1226                            and tuple are accepted. By default ([]), FFT
1227                            is applied to the whole data.
1228
1229            mask:           Auxiliary channel mask(s). Given as a boolean
1230                            list, it is applied to all specified rows.
1231                            A list of boolean lists can also be used to
1232                            apply different masks. In the latter case, the
1233                            length of 'mask' must be the same as that of
1234                            'rowno'. The default is [].
[2177]1235       
1236            getrealimag:    If True, returns the real and imaginary part
1237                            values of the complex results.
1238                            If False (the default), returns the amplitude
1239                            (absolute value) normalised with Ndata/2 and
1240                            phase (argument, in unit of radian).
1241
1242        Returns:
1243
[2186]1244            A list of dictionaries containing the results for each spectrum.
1245            Each dictionary contains two values, the real and the imaginary
1246            parts when getrealimag = True, or the amplitude (absolute value)
1247            and the phase (argument) when getrealimag = False. The keys for
1248            these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1249            respectively.
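
        Example (an illustrative sketch)::

            # amplitude and phase spectra of the first row
            res = scan.fft(rowno=0)
            ampl, phase = res[0]['ampl'], res[0]['phase']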
1250        """
1251        if isinstance(rowno, int):
1252            rowno = [rowno]
1253        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1254            raise TypeError("The row number(s) must be int, list or tuple.")
1255
1256        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1257
1258        if not (isinstance(mask, list) or isinstance(mask, tuple)):
1259            raise TypeError("The mask must be a boolean list or a list of boolean list.")
1260        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1261        if isinstance(mask[0], bool): mask = [mask]
1262        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1263            raise TypeError("The mask must be a boolean list or a list of boolean list.")
1264
1265        usecommonmask = (len(mask) == 1)
1266        if not usecommonmask:
1267            if len(mask) != len(rowno):
1268                raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1269        for amask in mask:
1270            if len(amask) != self.nchan():
1271                raise ValueError("The spectra and the mask have different length.")
[2177]1272       
[2186]1273        res = []
1274
1275        imask = 0
1276        for whichrow in rowno:
1277            fspec = self._fft(whichrow, mask[imask], getrealimag)
1278            nspec = len(fspec)
[2177]1279           
[2186]1280            i=0
1281            v1=[]
1282            v2=[]
1283            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
[2177]1284           
[2186]1285            while (i < nspec):
1286                v1.append(fspec[i])
1287                v2.append(fspec[i+1])
1288                i+=2
1289           
[2177]1290            if getrealimag:
[2186]1291                reselem["real"]  += v1
1292                reselem["imag"]  += v2
[2177]1293            else:
[2186]1294                reselem["ampl"]  += v1
1295                reselem["phase"] += v2
[2177]1296           
[2186]1297            res.append(reselem)
1298           
1299            if not usecommonmask: imask += 1
1300       
[2177]1301        return res
1302
1303    @asaplog_post_dec
[113]1304    def create_mask(self, *args, **kwargs):
[1846]1305        """\
[1118]1306        Compute and return a mask based on [min, max] windows.
[189]1307        The specified windows are to be INCLUDED, when the mask is
[113]1308        applied.
[1846]1309
[102]1310        Parameters:
[1846]1311
[1118]1312            [min, max], [min2, max2], ...
[1024]1313                Pairs of start/end points (inclusive) specifying the regions
[102]1314                to be masked
[1855]1315
[189]1316            invert:     optional argument. If specified as True,
1317                        return an inverted mask, i.e. the regions
1318                        specified are EXCLUDED
[1855]1319
[513]1320            row:        create the mask using the specified row for
1321                        unit conversions, default is row=0
1322                        only necessary if frequency varies over rows.
[1846]1323
1324        Examples::
1325
[113]1326            scan.set_unit('channel')
[1846]1327            # a)
[1118]1328            msk = scan.create_mask([400, 500], [800, 900])
[189]1329            # masks everything outside 400 and 500
[113]1330            # and 800 and 900 in the unit 'channel'
1331
[1846]1332            # b)
[1118]1333            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1334            # masks the regions between 400 and 500
[113]1335            # and 800 and 900 in the unit 'channel'
[1846]1336
1337            # c)
1338            #mask only channel 400
[1554]1339            msk =  scan.create_mask([400])
[1846]1340
[102]1341        """
[1554]1342        row = kwargs.get("row", 0)
[513]1343        data = self._getabcissa(row)
[113]1344        u = self._getcoordinfo()[0]
[1859]1345        if u == "":
1346            u = "channel"
1347        msg = "The current mask window unit is %s" % u
1348        i = self._check_ifs()
1349        if not i:
1350            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1351        asaplog.push(msg)
[102]1352        n = self.nchan()
[1295]1353        msk = _n_bools(n, False)
[710]1354        # test if args is a 'list' or a 'normal *args - UGLY!!!
1355
[1118]1356        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1357             and args or args[0]
[710]1358        for window in ws:
[1554]1359            if len(window) == 1:
1360                window = [window[0], window[0]]
1361            if len(window) == 0 or  len(window) > 2:
1362                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1363            if window[0] > window[1]:
1364                tmp = window[0]
1365                window[0] = window[1]
1366                window[1] = tmp
[102]1367            for i in range(n):
[1024]1368                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1369                    msk[i] = True
[113]1370        if kwargs.has_key('invert'):
1371            if kwargs.get('invert'):
[1295]1372                msk = mask_not(msk)
[102]1373        return msk
[710]1374
[1931]1375    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1376        """\
[1819]1377        Compute and return a list of mask windows, [min, max].
[1846]1378
[1819]1379        Parameters:
[1846]1380
[1819]1381            mask:       channel mask, created with create_mask.
[1855]1382
[1819]1383            row:        calculate the masklist using the specified row
1384                        for unit conversions, default is row=0
1385                        only necessary if frequency varies over rows.
[1846]1386
[1819]1387        Returns:
[1846]1388
[1819]1389            [min, max], [min2, max2], ...
1390                Pairs of start/end points (inclusive) specifying
1391                the masked regions
[1846]1392
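        Example (a hedged sketch; the windows are arbitrary)::

            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)
            # windows holds the [min, max] pairs in the current unit
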
[1819]1393        """
1394        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1395            raise TypeError("The mask should be list or tuple.")
1396        if len(mask) < 2:
 1397            raise TypeError("The mask should contain at least 2 elements")
1398        if self.nchan() != len(mask):
1399            msg = "Number of channels in scantable != number of mask elements"
1400            raise TypeError(msg)
1401        data = self._getabcissa(row)
1402        u = self._getcoordinfo()[0]
[1859]1403        if u == "":
1404            u = "channel"
1405        msg = "The current mask window unit is %s" % u
1406        i = self._check_ifs()
1407        if not i:
1408            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1409        if not silent:
1410            asaplog.push(msg)
[1819]1411        masklist = []
 1412        ist, ien = self.get_mask_indices(mask)
 1413        if ist is not None and ien is not None:
 1414            for i in xrange(len(ist)):
 1415                # use a local name to avoid shadowing the builtin 'range'
 1416                limits = [data[ist[i]], data[ien[i]]]
 1417                limits.sort()
 1418                masklist.append([limits[0], limits[1]])
 1419        return masklist
1420
1421    def get_mask_indices(self, mask=None):
[1846]1422        """\
[1819]1423        Compute and return lists of mask start indices and mask end indices.
[1855]1424
1425        Parameters:
1426
[1819]1427            mask:       channel mask, created with create_mask.
[1846]1428
[1819]1429        Returns:
[1846]1430
[1819]1431            List of mask start indices and that of mask end indices,
1432            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1433
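        Example (a minimal sketch; assumes the unit is 'channel')::

            msk = scan.create_mask([400, 500], [800, 900])
            istart, iend = scan.get_mask_indices(msk)
            # istart == [400, 800], iend == [500, 900]
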
[1819]1434        """
1435        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1436            raise TypeError("The mask should be list or tuple.")
1437        if len(mask) < 2:
 1438            raise TypeError("The mask should contain at least 2 elements")
1439        istart=[]
1440        iend=[]
1441        if mask[0]: istart.append(0)
1442        for i in range(len(mask)-1):
1443            if not mask[i] and mask[i+1]:
1444                istart.append(i+1)
1445            elif mask[i] and not mask[i+1]:
1446                iend.append(i)
1447        if mask[len(mask)-1]: iend.append(len(mask)-1)
1448        if len(istart) != len(iend):
1449            raise RuntimeError("Numbers of mask start != mask end.")
1450        for i in range(len(istart)):
1451            if istart[i] > iend[i]:
1452                raise RuntimeError("Mask start index > mask end index")
1454        return istart,iend
1455
[2013]1456    @asaplog_post_dec
1457    def parse_maskexpr(self,maskstring):
1458        """
1459        Parse CASA type mask selection syntax (IF dependent).
1460
1461        Parameters:
1462            maskstring : A string mask selection expression.
 1463                         Comma-separated selections specify different
 1464                         IF - channel combinations. An IF selection and
 1465                         its channel selection are separated by a colon, ':'.
1466                     examples:
[2015]1467                         ''          = all IFs (all channels)
[2013]1468                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
1469                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1470                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1471                                       all channels in IF8
1472        Returns:
1473        A dictionary of selected (valid) IF and masklist pairs,
1474        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
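        Example:
            A minimal usage sketch (assumes IFs 0 and 1 exist and the
            current unit is 'channel'):
                seldict = scan.parse_maskexpr('0~1:100~200')
                # -> {'0': [[100.0, 200.0]], '1': [[100.0, 200.0]]}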
1475        """
1476        if not isinstance(maskstring,str):
1477            asaplog.post()
1478            asaplog.push("Invalid mask expression")
1479            asaplog.post("ERROR")
1480       
1481        valid_ifs = self.getifnos()
1482        frequnit = self.get_unit()
1483        seldict = {}
[2015]1484        if maskstring == "":
1485            maskstring = str(valid_ifs)[1:-1]
[2013]1486        ## split each selection
1487        sellist = maskstring.split(',')
1488        for currselstr in sellist:
1489            selset = currselstr.split(':')
1490            # spw and mask string (may include ~, < or >)
1491            spwmasklist = self._parse_selection(selset[0],typestr='integer',
1492                                               offset=1,minval=min(valid_ifs),
1493                                               maxval=max(valid_ifs))
 1494            selspws = []
 1495            for spwlist in spwmasklist:
 1496                for ispw in range(spwlist[0],spwlist[1]+1):
 1497                    # Put into the list only if ispw exists
 1498                    if valid_ifs.count(ispw):
 1499                        selspws.append(ispw)
 1500            del spwmasklist, spwlist
1501
1502            # parse frequency mask list
1503            if len(selset) > 1:
1504                freqmasklist = self._parse_selection(selset[1],typestr='float',
1505                                                    offset=0.)
1506            else:
1507                # want to select the whole spectrum
1508                freqmasklist = [None]
1509
1510            ## define a dictionary of spw - masklist combination
1511            for ispw in selspws:
1512                #print "working on", ispw
1513                spwstr = str(ispw)
1514                if len(selspws) == 0:
1515                    # empty spw
1516                    continue
1517                else:
1518                    ## want to get min and max of the spw and
1519                    ## offset to set for '<' and '>'
1520                    if frequnit == 'channel':
1521                        minfreq = 0
1522                        maxfreq = self.nchan(ifno=ispw)
1523                        offset = 0.5
1524                    else:
 1525                        ## This is the ugly part; needs improvement
1526                        for ifrow in xrange(self.nrow()):
1527                            if self.getif(ifrow) == ispw:
1528                                #print "IF",ispw,"found in row =",ifrow
1529                                break
1530                        freqcoord = self.get_coordinate(ifrow)
1531                        freqs = self._getabcissa(ifrow)
1532                        minfreq = min(freqs)
1533                        maxfreq = max(freqs)
1534                        if len(freqs) == 1:
1535                            offset = 0.5
1536                        elif frequnit.find('Hz') > 0:
1537                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1538                                      -freqcoord.to_frequency(0,unit=frequnit))*0.5
1539                        elif frequnit.find('m/s') > 0:
1540                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1541                                      -freqcoord.to_velocity(0,unit=frequnit))*0.5
1542                        else:
1543                            asaplog.post()
1544                            asaplog.push("Invalid frequency unit")
1545                            asaplog.post("ERROR")
1546                        del freqs, freqcoord, ifrow
1547                    for freq in freqmasklist:
1548                        selmask = freq or [minfreq, maxfreq]
1549                        if selmask[0] == None:
1550                            ## selection was "<freq[1]".
1551                            if selmask[1] < minfreq:
1552                                ## avoid adding region selection
1553                                selmask = None
1554                            else:
1555                                selmask = [minfreq,selmask[1]-offset]
1556                        elif selmask[1] == None:
1557                            ## selection was ">freq[0]"
1558                            if selmask[0] > maxfreq:
1559                                ## avoid adding region selection
1560                                selmask = None
1561                            else:
1562                                selmask = [selmask[0]+offset,maxfreq]
1563                        if selmask:
1564                            if not seldict.has_key(spwstr):
1565                                # new spw selection
1566                                seldict[spwstr] = []
1567                            seldict[spwstr] += [selmask]
1568                    del minfreq,maxfreq,offset,freq,selmask
1569                del spwstr
1570            del freqmasklist
1571        del valid_ifs
1572        if len(seldict) == 0:
1573            asaplog.post()
1574            asaplog.push("No valid selection in the mask expression: "+maskstring)
1575            asaplog.post("WARN")
1576            return None
1577        msg = "Selected masklist:\n"
1578        for sif, lmask in seldict.iteritems():
1579            msg += "   IF"+sif+" - "+str(lmask)+"\n"
1580        asaplog.push(msg)
1581        return seldict
1582
1583    def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1584        """
1585        Parameters:
1586            selstr :    The Selection string, e.g., '<3;5~7;100~103;9'
1587            typestr :   The type of the values in returned list
1588                        ('integer' or 'float')
1589            offset :    The offset value to subtract from or add to
1590                        the boundary value if the selection string
1591                        includes '<' or '>'
1592            minval, maxval :  The minimum/maximum values to set if the
1593                              selection string includes '<' or '>'.
 1594                              Both default to None.
1595        Returns:
1596            A list of min/max pair of selections.
1597        Example:
 1598            _parse_selection('<3;5~7;9',typestr='int',offset=1,minval=0)
1599            returns [[0,2],[5,7],[9,9]]
1600        """
1601        selgroups = selstr.split(';')
1602        sellists = []
1603        if typestr.lower().startswith('int'):
1604            formatfunc = int
1605        else:
1606            formatfunc = float
1607       
1608        for currsel in  selgroups:
1609            if currsel.find('~') > 0:
1610                minsel = formatfunc(currsel.split('~')[0].strip())
1611                maxsel = formatfunc(currsel.split('~')[1].strip())
1612            elif currsel.strip().startswith('<'):
1613                minsel = minval
1614                maxsel = formatfunc(currsel.split('<')[1].strip()) \
1615                         - formatfunc(offset)
1616            elif currsel.strip().startswith('>'):
1617                minsel = formatfunc(currsel.split('>')[1].strip()) \
1618                         + formatfunc(offset)
1619                maxsel = maxval
1620            else:
1621                minsel = formatfunc(currsel)
1622                maxsel = formatfunc(currsel)
1623            sellists.append([minsel,maxsel])
1624        return sellists
1625
[1819]1626#    def get_restfreqs(self):
1627#        """
1628#        Get the restfrequency(s) stored in this scantable.
1629#        The return value(s) are always of unit 'Hz'
1630#        Parameters:
1631#            none
1632#        Returns:
1633#            a list of doubles
1634#        """
1635#        return list(self._getrestfreqs())
1636
1637    def get_restfreqs(self, ids=None):
[1846]1638        """\
[256]1639        Get the restfrequency(s) stored in this scantable.
1640        The return value(s) are always of unit 'Hz'
[1846]1641
[256]1642        Parameters:
[1846]1643
[1819]1644            ids: (optional) a list of MOLECULE_IDs for which the
 1645                 restfrequency(s) are to be retrieved
[1846]1646
[256]1647        Returns:
[1846]1648
[1819]1649            dictionary containing ids and a list of doubles for each id
[1846]1650
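        Example (an illustrative sketch; the ids and values returned
        depend entirely on the data)::

            rfdict = scan.get_restfreqs()       # all MOLECULE_IDs
            # e.g. {0: [1.420405752e+09]}
            rflist = scan.get_restfreqs(ids=0)  # list of doubles for one id
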
[256]1651        """
[1819]1652        if ids is None:
1653            rfreqs={}
1654            idlist = self.getmolnos()
1655            for i in idlist:
1656                rfreqs[i]=list(self._getrestfreqs(i))
1657            return rfreqs
1658        else:
1659            if type(ids)==list or type(ids)==tuple:
1660                rfreqs={}
1661                for i in ids:
1662                    rfreqs[i]=list(self._getrestfreqs(i))
1663                return rfreqs
1664            else:
1665                return list(self._getrestfreqs(ids))
1666            #return list(self._getrestfreqs(ids))
[102]1667
[931]1668    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1669        """\
[931]1670        Set or replace the restfrequency(s).
[1938]1671        If the 'freqs' argument holds a scalar,
[931]1672        then that rest frequency will be applied to all the selected
1673        data.  If the 'freqs' argument holds
1674        a vector, then it MUST be of equal or smaller length than
1675        the number of IFs (and the available restfrequencies will be
1676        replaced by this vector).  In this case, *all* data have
1677        the restfrequency set per IF according
1678        to the corresponding value you give in the 'freqs' vector.
[1118]1679        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1680        IF 1 gets restfreq 2e9.
[1846]1681
[1395]1682        You can also specify the frequencies via a linecatalog.
[1153]1683
[931]1684        Parameters:
[1846]1685
[931]1686            freqs:   list of rest frequency values or string identifiers
[1855]1687
[931]1688            unit:    unit for rest frequency (default 'Hz')
[402]1689
[1846]1690
1691        Example::
1692
[1819]1693            # set the given restfrequency for the all currently selected IFs
[931]1694            scan.set_restfreqs(freqs=1.4e9)
[1845]1695            # set restfrequencies for the n IFs  (n > 1) in the order of the
1696            # list, i.e
1697            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1698            # len(list_of_restfreqs) == nIF
1699            # for nIF == 1 the following will set multiple restfrequency for
1700            # that IF
[1819]1701            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1702            # set multiple restfrequencies per IF, as a list of lists where
 1703            # the outer list has nIF elements and the inner lists are of arbitrary length
1704            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1705
[1846]1706       *Note*:
[1845]1707
[931]1708            To do more sophisticated restfrequency setting, e.g. on a
1709            source and IF basis, use scantable.set_selection() before using
[1846]1710            this function::
[931]1711
[1846]1712                # provided your scantable is called scan
1713                selection = selector()
1714                selection.set_name("ORION*")
1715                selection.set_ifs([1])
1716                scan.set_selection(selection)
1717                scan.set_restfreqs(freqs=86.6e9)
1718
[931]1719        """
1720        varlist = vars()
[1157]1721        from asap import linecatalog
1722        # simple  value
[1118]1723        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1724            self._setrestfreqs([freqs], [""], unit)
[1157]1725        # list of values
[1118]1726        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1727            # list values are scalars
[1118]1728            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1729                if len(freqs) == 1:
1730                    self._setrestfreqs(freqs, [""], unit)
1731                else:
 1732                    # allow the 'old' mode of setting multiple IFs
1733                    sel = selector()
1734                    savesel = self._getselection()
1735                    iflist = self.getifnos()
 1736                    if len(freqs)>len(iflist):
 1737                        raise ValueError("number of elements in the frequency "
 1738                                         "list exceeds the current IF selections")
1740                    for i, fval in enumerate(freqs):
1741                        sel.set_ifs(iflist[i])
1742                        self._setselection(sel)
1743                        self._setrestfreqs([fval], [""], unit)
1744                    self._setselection(savesel)
1745
 1746            # list values are dicts: {'value': ..., 'name': ...}
[1157]1747            elif isinstance(freqs[-1], dict):
[1845]1748                values = []
1749                names = []
1750                for d in freqs:
1751                    values.append(d["value"])
1752                    names.append(d["name"])
1753                self._setrestfreqs(values, names, unit)
[1819]1754            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1755                sel = selector()
1756                savesel = self._getselection()
[1322]1757                iflist = self.getifnos()
[1819]1758                if len(freqs)>len(iflist):
[1845]1759                    raise ValueError("number of elements in list of list exceeds"
 1760                                     " the current IF selections")
1761                for i, fval in enumerate(freqs):
[1322]1762                    sel.set_ifs(iflist[i])
[1259]1763                    self._setselection(sel)
[1845]1764                    self._setrestfreqs(fval, [""], unit)
[1157]1765                self._setselection(savesel)
1766        # freqs are to be taken from a linecatalog
[1153]1767        elif isinstance(freqs, linecatalog):
1768            sel = selector()
1769            savesel = self._getselection()
            iflist = self.getifnos()
 1770            for i in xrange(freqs.nrow()):
[1322]1771                sel.set_ifs(iflist[i])
[1153]1772                self._setselection(sel)
[1845]1773                self._setrestfreqs([freqs.get_frequency(i)],
1774                                   [freqs.get_name(i)], "MHz")
[1153]1775                # ensure that we are not iterating past nIF
1776                if i == self.nif()-1: break
1777            self._setselection(savesel)
[931]1778        else:
1779            return
1780        self._add_history("set_restfreqs", varlist)
1781
[1360]1782    def shift_refpix(self, delta):
[1846]1783        """\
[1589]1784        Shift the reference pixel of the Spectra Coordinate by an
1785        integer amount.
[1846]1786
[1589]1787        Parameters:
[1846]1788
[1589]1789            delta:   the amount to shift by
[1846]1790
1791        *Note*:
1792
[1589]1793            Be careful using this with broadband data.
[1846]1794
[1360]1795        """
[1731]1796        Scantable.shift_refpix(self, delta)
[931]1797
[1862]1798    @asaplog_post_dec
[1259]1799    def history(self, filename=None):
[1846]1800        """\
[1259]1801        Print the history. Optionally to a file.
[1846]1802
[1348]1803        Parameters:
[1846]1804
[1928]1805            filename:    The name of the file to save the history to.
[1846]1806
[1259]1807        """
[484]1808        hist = list(self._gethistory())
[794]1809        out = "-"*80
[484]1810        for h in hist:
[489]1811            if h.startswith("---"):
[1857]1812                out = "\n".join([out, h])
[489]1813            else:
1814                items = h.split("##")
1815                date = items[0]
1816                func = items[1]
1817                items = items[2:]
[794]1818                out += "\n"+date+"\n"
1819                out += "Function: %s\n  Parameters:" % (func)
[489]1820                for i in items:
[1938]1821                    if i == '':
1822                        continue
[489]1823                    s = i.split("=")
[1118]1824                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1825                out = "\n".join([out, "-"*80])
[1259]1826        if filename is not None:
 1827            if filename == "":
1828                filename = 'scantable_history.txt'
1829            import os
1830            filename = os.path.expandvars(os.path.expanduser(filename))
1831            if not os.path.isdir(filename):
1832                data = open(filename, 'w')
1833                data.write(out)
1834                data.close()
1835            else:
1836                msg = "Illegal file name '%s'." % (filename)
[1859]1837                raise IOError(msg)
1838        return page(out)
[513]1839    #
1840    # Maths business
1841    #
[1862]1842    @asaplog_post_dec
[931]1843    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1844        """\
[1070]1845        Return the (time) weighted average of a scan.
[1846]1846
1847        *Note*:
1848
[1070]1849            in channels only - align if necessary
[1846]1850
[513]1851        Parameters:
[1846]1852
[513]1853            mask:     an optional mask (only used for 'var' and 'tsys'
1854                      weighting)
[1855]1855
[558]1856            scanav:   True averages each scan separately
1857                      False (default) averages all scans together,
[1855]1858
[1099]1859            weight:   Weighting scheme.
 1860                      'none'     (mean, no weighting)
 1861                      'var'      (1/var(spec) weighted)
 1862                      'tsys'     (1/Tsys**2 weighted)
 1863                      'tint'     (integration time weighted)
 1864                      'tintsys'  (Tint/Tsys**2)
 1865                      'median'   (median averaging)
[535]1866                      The default is 'tint'
[1855]1867
[931]1868            align:    align the spectra in velocity before averaging. It takes
1869                      the time of the first spectrum as reference time.
[1846]1870
1871        Example::
1872
[513]1873            # time average the scantable without using a mask
[710]1874            newscan = scan.average_time()
[1846]1875
[513]1876        """
1877        varlist = vars()
[1593]1878        weight = weight or 'TINT'
1879        mask = mask or ()
1880        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1881        scan = (self, )
[1859]1882
1883        if align:
1884            scan = (self.freq_align(insitu=False), )
1885        s = None
1886        if weight.upper() == 'MEDIAN':
1887            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1888                                                     scanav))
1889        else:
1890            s = scantable(self._math._average(scan, mask, weight.upper(),
1891                          scanav))
[1099]1892        s._add_history("average_time", varlist)
[513]1893        return s
[710]1894
[1862]1895    @asaplog_post_dec
[876]1896    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1897        """\
[513]1898        Return a scan where all spectra are converted to either
1899        Jansky or Kelvin depending upon the flux units of the scan table.
1900        By default the function tries to look the values up internally.
1901        If it can't find them (or if you want to over-ride), you must
 1902        specify EITHER jyperk OR eta (and d, which it will also try to look
 1903        up if you don't set it). jyperk takes precedence if you set both.
[1846]1904
[513]1905        Parameters:
[1846]1906
[513]1907            jyperk:      the Jy / K conversion factor
[1855]1908
[513]1909            eta:         the aperture efficiency
[1855]1910
[1928]1911            d:           the geometric diameter (metres)
[1855]1912
[513]1913            insitu:      if False a new scantable is returned.
1914                         Otherwise, the scaling is done in-situ
1915                         The default is taken from .asaprc (False)
[1846]1916
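        Example (a sketch; the jyperk value below is an arbitrary
        placeholder, not a recommended calibration factor)::

            # try the internal look-up and convert in-situ
            scan.convert_flux(insitu=True)
            # or force a specific Jy/K factor and return a new scantable
            converted = scan.convert_flux(jyperk=12.5, insitu=False)
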
[513]1917        """
1918        if insitu is None: insitu = rcParams['insitu']
[876]1919        self._math._setinsitu(insitu)
[513]1920        varlist = vars()
[1593]1921        jyperk = jyperk or -1.0
1922        d = d or -1.0
1923        eta = eta or -1.0
[876]1924        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1925        s._add_history("convert_flux", varlist)
1926        if insitu: self._assign(s)
1927        else: return s
[513]1928
[1862]1929    @asaplog_post_dec
[876]1930    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1931        """\
[513]1932        Return a scan after applying a gain-elevation correction.
1933        The correction can be made via either a polynomial or a
1934        table-based interpolation (and extrapolation if necessary).
1935        You specify polynomial coefficients, an ascii table or neither.
1936        If you specify neither, then a polynomial correction will be made
1937        with built in coefficients known for certain telescopes (an error
1938        will occur if the instrument is not known).
1939        The data and Tsys are *divided* by the scaling factors.
[1846]1940
[513]1941        Parameters:
[1846]1942
[513]1943            poly:        Polynomial coefficients (default None) to compute a
1944                         gain-elevation correction as a function of
1945                         elevation (in degrees).
[1855]1946
[513]1947            filename:    The name of an ascii file holding correction factors.
1948                         The first row of the ascii file must give the column
1949                         names and these MUST include columns
1950                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1951                         by this) somewhere.
1952                         The second row must give the data type of the
1953                         column. Use 'R' for Real and 'I' for Integer.
1954                         An example file would be
1955                         (actual factors are arbitrary) :
1956
1957                         TIME ELEVATION FACTOR
1958                         R R R
1959                         0.1 0 0.8
1960                         0.2 20 0.85
1961                         0.3 40 0.9
1962                         0.4 60 0.85
1963                         0.5 80 0.8
1964                         0.6 90 0.75
[1855]1965
[513]1966            method:      Interpolation method when correcting from a table.
1967                         Values are  "nearest", "linear" (default), "cubic"
1968                         and "spline"
[1855]1969
[513]1970            insitu:      if False a new scantable is returned.
1971                         Otherwise, the scaling is done in-situ
1972                         The default is taken from .asaprc (False)
[1846]1973
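        Example (a sketch; 'gainel.txt' is a hypothetical file in the
        format shown above)::

            corrected = scan.gain_el(filename='gainel.txt',
                                     method='linear', insitu=False)
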
[513]1974        """
1975
1976        if insitu is None: insitu = rcParams['insitu']
[876]1977        self._math._setinsitu(insitu)
[513]1978        varlist = vars()
[1593]1979        poly = poly or ()
[513]1980        from os.path import expandvars
1981        filename = expandvars(filename)
[876]1982        s = scantable(self._math._gainel(self, poly, filename, method))
1983        s._add_history("gain_el", varlist)
[1593]1984        if insitu:
1985            self._assign(s)
1986        else:
1987            return s
[710]1988
[1862]1989    @asaplog_post_dec
[931]1990    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1991        """\
[513]1992        Return a scan where all rows have been aligned in frequency/velocity.
1993        The alignment frequency frame (e.g. LSRK) is that set by function
1994        set_freqframe.
[1846]1995
[513]1996        Parameters:
[1855]1997
[513]1998            reftime:     reference time to align at. By default, the time of
1999                         the first row of data is used.
[1855]2000
[513]2001            method:      Interpolation method for regridding the spectra.
2002                         Choose from "nearest", "linear", "cubic" (default)
2003                         and "spline"
[1855]2004
[513]2005            insitu:      if False a new scantable is returned.
2006                         Otherwise, the scaling is done in-situ
2007                         The default is taken from .asaprc (False)
[1846]2008
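        Example (a minimal sketch)::

            # align all rows at the time of the first integration
            aligned = scan.freq_align(insitu=False)
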
[513]2009        """
[931]2010        if insitu is None: insitu = rcParams["insitu"]
[876]2011        self._math._setinsitu(insitu)
[513]2012        varlist = vars()
[1593]2013        reftime = reftime or ""
[931]2014        s = scantable(self._math._freq_align(self, reftime, method))
[876]2015        s._add_history("freq_align", varlist)
2016        if insitu: self._assign(s)
2017        else: return s
[513]2018
[1862]2019    @asaplog_post_dec
[1725]2020    def opacity(self, tau=None, insitu=None):
[1846]2021        """\
[513]2022        Apply an opacity correction. The data
2023        and Tsys are multiplied by the correction factor.
[1846]2024
[513]2025        Parameters:
[1855]2026
[1689]2027            tau:         (list of) opacity from which the correction factor is
[513]2028                         exp(tau*ZD)
[1689]2029                         where ZD is the zenith-distance.
2030                         If a list is provided, it has to be of length nIF,
2031                         nIF*nPol or 1 and in order of IF/POL, e.g.
2032                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2033                         if tau is `None` the opacities are determined from a
2034                         model.
[1855]2035
[513]2036            insitu:      if False a new scantable is returned.
2037                         Otherwise, the scaling is done in-situ
2038                         The default is taken from .asaprc (False)
[1846]2039
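        Example (a sketch; the opacity values are arbitrary placeholders)::

            # a single zenith opacity applied to all IFs/polarisations
            scan.opacity(tau=0.09, insitu=True)
            # per-IF opacities (here assuming two IFs), returning a new scan
            corrected = scan.opacity(tau=[0.09, 0.08], insitu=False)
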
[513]2040        """
2041        if insitu is None: insitu = rcParams['insitu']
[876]2042        self._math._setinsitu(insitu)
[513]2043        varlist = vars()
[1689]2044        if not hasattr(tau, "__len__"):
2045            tau = [tau]
[876]2046        s = scantable(self._math._opacity(self, tau))
2047        s._add_history("opacity", varlist)
2048        if insitu: self._assign(s)
2049        else: return s
[513]2050
[1862]2051    @asaplog_post_dec
[513]2052    def bin(self, width=5, insitu=None):
[1846]2053        """\
[513]2054        Return a scan where all spectra have been binned up.
[1846]2055
[1348]2056        Parameters:
[1846]2057
[513]2058            width:       The bin width (default=5) in pixels
[1855]2059
[513]2060            insitu:      if False a new scantable is returned.
2061                         Otherwise, the scaling is done in-situ
2062                         The default is taken from .asaprc (False)
[1846]2063
[513]2064        """
2065        if insitu is None: insitu = rcParams['insitu']
[876]2066        self._math._setinsitu(insitu)
[513]2067        varlist = vars()
[876]2068        s = scantable(self._math._bin(self, width))
[1118]2069        s._add_history("bin", varlist)
[1589]2070        if insitu:
2071            self._assign(s)
2072        else:
2073            return s
[513]2074
[1862]2075    @asaplog_post_dec
[513]2076    def resample(self, width=5, method='cubic', insitu=None):
[1846]2077        """\
[1348]2078        Return a scan where all spectra have been resampled by the given width, using interpolation.
[1573]2079
[1348]2080        Parameters:
[1846]2081
[513]2082            width:       The bin width (default=5) in pixels
[1855]2083
[513]2084            method:      Interpolation method to use for the resampling.
2085                         Values are  "nearest", "linear", "cubic" (default)
2086                         and "spline"
[1855]2087
[513]2088            insitu:      if False a new scantable is returned.
2089                         Otherwise, the scaling is done in-situ
2090                         The default is taken from .asaprc (False)
[1846]2091
[513]2092        """
2093        if insitu is None: insitu = rcParams['insitu']
[876]2094        self._math._setinsitu(insitu)
[513]2095        varlist = vars()
[876]2096        s = scantable(self._math._resample(self, method, width))
[1118]2097        s._add_history("resample", varlist)
[876]2098        if insitu: self._assign(s)
2099        else: return s
[513]2100
[1862]2101    @asaplog_post_dec
[946]2102    def average_pol(self, mask=None, weight='none'):
[1846]2103        """\
[946]2104        Average the Polarisations together.
[1846]2105
[946]2106        Parameters:
[1846]2107
[946]2108            mask:        An optional mask defining the region, where the
2109                         averaging will be applied. The output will have all
2110                         specified points masked.
[1855]2111
[946]2112            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2113                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2114
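        Example (a minimal sketch)::

            # Tsys-weighted polarisation average
            pavg = scan.average_pol(weight='tsys')
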
[946]2115        """
2116        varlist = vars()
[1593]2117        mask = mask or ()
[1010]2118        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2119        s._add_history("average_pol", varlist)
[992]2120        return s
[513]2121
[1862]2122    @asaplog_post_dec
[1145]2123    def average_beam(self, mask=None, weight='none'):
[1846]2124        """\
[1145]2125        Average the Beams together.
[1846]2126
[1145]2127        Parameters:
2128            mask:        An optional mask defining the region, where the
2129                         averaging will be applied. The output will have all
2130                         specified points masked.
[1855]2131
[1145]2132            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2133                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2134
[1145]2135        """
2136        varlist = vars()
[1593]2137        mask = mask or ()
[1145]2138        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2139        s._add_history("average_beam", varlist)
2140        return s
2141
[1586]2142    def parallactify(self, pflag):
[1846]2143        """\
[1843]2144        Set a flag to indicate whether this data should be treated as having
[1617]2145        been 'parallactified' (total phase == 0.0)
[1846]2146
[1617]2147        Parameters:
[1855]2148
[1843]2149            pflag:  Bool indicating whether to turn this on (True) or
[1617]2150                    off (False)
[1846]2151
[1617]2152        """
[1586]2153        varlist = vars()
2154        self._parallactify(pflag)
2155        self._add_history("parallactify", varlist)
2156
[1862]2157    @asaplog_post_dec
[992]2158    def convert_pol(self, poltype=None):
[1846]2159        """\
[992]2160        Convert the data to a different polarisation type.
[1565]2161        Note that you will need cross-polarisation terms for most conversions.
[1846]2162
[992]2163        Parameters:
[1855]2164
[992]2165            poltype:    The new polarisation type. Valid types are:
[1565]2166                        "linear", "circular", "stokes" and "linpol"
[1846]2167
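        Example (a minimal sketch; this conversion needs
        cross-polarisation terms in the data)::

            stokes = scan.convert_pol("stokes")
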
[992]2168        """
2169        varlist = vars()
[1859]2170        s = scantable(self._math._convertpol(self, poltype))
[1118]2171        s._add_history("convert_pol", varlist)
[992]2172        return s
2173
[1862]2174    @asaplog_post_dec
[2269]2175    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
2176               insitu=None):
[1846]2177        """\
[513]2178        Smooth the spectrum by the specified kernel (conserving flux).
[1846]2179
[513]2180        Parameters:
[1846]2181
[513]2182            kernel:     The type of smoothing kernel. Select from
[1574]2183                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
2184                        or 'poly'
[1855]2185
[513]2186            width:      The width of the kernel in pixels. For hanning this is
 2187                        ignored, otherwise it defaults to 5 pixels.
2188                        For 'gaussian' it is the Full Width Half
2189                        Maximum. For 'boxcar' it is the full width.
[1574]2190                        For 'rmedian' and 'poly' it is the half width.
[1855]2191
[1574]2192            order:      Optional parameter for 'poly' kernel (default is 2), to
 2193                        specify the order of the polynomial. Ignored by all other
2194                        kernels.
[1855]2195
[1819]2196            plot:       plot the original and the smoothed spectra.
 2197                        In this mode each individual fit has to be
 2198                        approved, by typing 'y' or 'n'
[1855]2199
[513]2200            insitu:     if False a new scantable is returned.
2201                        Otherwise, the scaling is done in-situ
2202                        The default is taken from .asaprc (False)
[1846]2203
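        Example (a minimal sketch)::

            # 5-channel boxcar smoothing, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)
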
[513]2204        """
2205        if insitu is None: insitu = rcParams['insitu']
[876]2206        self._math._setinsitu(insitu)
[513]2207        varlist = vars()
[1819]2208
2209        if plot: orgscan = self.copy()
2210
[1574]2211        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]2212        s._add_history("smooth", varlist)
[1819]2213
2214        if plot:
[2150]2215            from asap.asapplotter import new_asaplot
2216            theplot = new_asaplot(rcParams['plotter.gui'])
2217            theplot.set_panels()
[1819]2218            ylab=s._get_ordinate_label()
[2150]2219            #theplot.palette(0,["#777777","red"])
[1819]2220            for r in xrange(s.nrow()):
2221                xsm=s._getabcissa(r)
2222                ysm=s._getspectrum(r)
2223                xorg=orgscan._getabcissa(r)
2224                yorg=orgscan._getspectrum(r)
[2150]2225                theplot.clear()
2226                theplot.hold()
2227                theplot.set_axes('ylabel',ylab)
2228                theplot.set_axes('xlabel',s._getabcissalabel(r))
2229                theplot.set_axes('title',s._getsourcename(r))
2230                theplot.set_line(label='Original',color="#777777")
2231                theplot.plot(xorg,yorg)
2232                theplot.set_line(label='Smoothed',color="red")
2233                theplot.plot(xsm,ysm)
[1819]2234                ### Ugly part for legend
2235                for i in [0,1]:
[2150]2236                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
2237                theplot.release()
[1819]2238                ### Ugly part for legend
[2150]2239                theplot.subplots[0]['lines']=[]
[1819]2240                res = raw_input("Accept smoothing ([y]/n): ")
2241                if res.upper() == 'N':
2242                    s._setspectrum(yorg, r)
[2150]2243            theplot.quit()
2244            del theplot
[1819]2245            del orgscan
2246
[876]2247        if insitu: self._assign(s)
2248        else: return s
[513]2249
[2186]2250    @asaplog_post_dec
2251    def _parse_wn(self, wn):
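        """\
        Convert an addwn/rejwn wave-number specification into an explicit
        list of wave numbers.

        Accepted values:
            list/tuple    returned as is
            int           wrapped into a single-element list
            str           'a-b'                      -> [a, a+1, ..., b]
                          '<=a', '=<a', 'a>=', 'a=>' -> [0, 1, ..., a]
                          '<a', 'a>'                 -> [0, 1, ..., a-1]
                          '>=a', '=>a', 'a<=', 'a=<' -> [a, a+1, ..., nchan/2]
                          '>a', 'a<'                 -> [a+1, ..., nchan/2]
        """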
2252        if isinstance(wn, list) or isinstance(wn, tuple):
2253            return wn
2254        elif isinstance(wn, int):
2255            return [ wn ]
2256        elif isinstance(wn, str):
[2277]2257            if '-' in wn:                            # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2258                val = wn.split('-')
2259                val = [int(val[0]), int(val[1])]
2260                val.sort()
2261                res = [i for i in xrange(val[0], val[1]+1)]
[2277]2262            elif wn[:2] == '<=' or wn[:2] == '=<':   # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2263                val = int(wn[2:])+1
2264                res = [i for i in xrange(val)]
[2277]2265            elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2266                val = int(wn[:-2])+1
2267                res = [i for i in xrange(val)]
[2277]2268            elif wn[0] == '<':                       # case '<a' :         return [0,1,...,a-2,a-1]
[2186]2269                val = int(wn[1:])
2270                res = [i for i in xrange(val)]
[2277]2271            elif wn[-1] == '>':                      # case 'a>' :         return [0,1,...,a-2,a-1]
[2186]2272                val = int(wn[:-1])
2273                res = [i for i in xrange(val)]
[2277]2274            elif wn[:2] == '>=' or wn[:2] == '=>':   # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
[2186]2275                val = int(wn[2:])
2276                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2277            elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
[2186]2278                val = int(wn[:-2])
2279                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2280            elif wn[0] == '>':                       # case '>a' :         return [a+1,a+2,...,a_nyq]
[2186]2281                val = int(wn[1:])+1
2282                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2283            elif wn[-1] == '<':                      # case 'a<' :         return [a+1,a+2,...,a_nyq]
[2186]2284                val = int(wn[:-1])+1
2285                res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2286
[2186]2287            return res
2288        else:
2289            msg = 'wrong value given for addwn/rejwn'
2290            raise RuntimeError(msg)
2291
2292
[1862]2293    @asaplog_post_dec
[2277]2294    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
[2269]2295                          fftmethod=None, fftthresh=None,
2296                          addwn=None, rejwn=None, clipthresh=None,
2297                          clipniter=None, plot=None,
2298                          getresidual=None, showprogress=None,
2299                          minnrow=None, outlog=None, blfile=None):
[2047]2300        """\
[2094]2301        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2302        Parameters:
[2186]2303            insitu:        if False a new scantable is returned.
[2081]2304                           Otherwise, the scaling is done in-situ
2305                           The default is taken from .asaprc (False)
[2186]2306            mask:          an optional mask
2307            applyfft:      if True use some method, such as FFT, to find
2308                           strongest sinusoidal components in the wavenumber
2309                           domain to be used for baseline fitting.
2310                           default is True.
2311            fftmethod:     method to find the strong sinusoidal components.
2312                           now only 'fft' is available and it is the default.
2313            fftthresh:     the threshold to select wave numbers to be used for
2314                           fitting from the distribution of amplitudes in the
2315                           wavenumber domain.
2316                           both float and string values accepted.
2317                           given a float value, the unit is set to sigma.
2318                           for string values, allowed formats include:
2319                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2320                               'topx' (= the x strongest ones, e.g. 'top5').
2321                           default is 3.0 (unit: sigma).
2322            addwn:         the additional wave numbers to be used for fitting.
 2323                           a list or an integer value is accepted to specify
 2324                           individual wave numbers. also a string value can be used in case
2325                           you need to specify wave numbers in a certain range,
2326                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2327                                 '<a'  (= 0,1,...,a-2,a-1),
2328                                 '>=a' (= a, a+1, ... up to the maximum wave
2329                                        number corresponding to the Nyquist
2330                                        frequency for the case of FFT).
2331                           default is [].
2332            rejwn:         the wave numbers NOT to be used for fitting.
2333                           can be set just as addwn but has higher priority:
2334                           wave numbers which are specified both in addwn
2335                           and rejwn will NOT be used. default is [].
[2081]2336            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
[2129]2337            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2081]2338            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2339                           plot the fit and the residual. In this each
2340                           indivual fit has to be approved, by typing 'y'
2341                           or 'n'
2342            getresidual:   if False, returns best-fit values instead of
2343                           residual. (default is True)
[2189]2344            showprogress:  show progress status for large data.
2345                           default is True.
2346            minnrow:       minimum number of input spectra to show.
2347                           default is 1000.
[2081]2348            outlog:        Output the coefficients of the best-fit
2349                           function to logger (default is False)
2350            blfile:        Name of a text file in which the best-fit
 2351                           parameter values are to be written
[2186]2352                           (default is '': no file/logger output)
[2047]2353
2354        Example:
2355            # return a scan baselined by a combination of sinusoidal curves having
[2081]2356            # wave numbers in spectral window up to 10,
[2047]2357            # also with 3-sigma clipping, iteration up to 4 times
[2186]2358            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2359
2360        Note:
2361            The best-fit parameter values output in logger and/or blfile are now
2362            based on specunit of 'channel'.
[2047]2363        """
2364       
[2186]2365        try:
2366            varlist = vars()
[2047]2367       
[2186]2368            if insitu is None: insitu = rcParams['insitu']
2369            if insitu:
2370                workscan = self
2371            else:
2372                workscan = self.copy()
2373           
[2277]2374            if mask          is None: mask          = [True for i in xrange(workscan.nchan())]
[2186]2375            if applyfft      is None: applyfft      = True
2376            if fftmethod     is None: fftmethod     = 'fft'
2377            if fftthresh     is None: fftthresh     = 3.0
2378            if addwn         is None: addwn         = []
2379            if rejwn         is None: rejwn         = []
2380            if clipthresh    is None: clipthresh    = 3.0
2381            if clipniter     is None: clipniter     = 0
2382            if plot          is None: plot          = False
2383            if getresidual   is None: getresidual   = True
[2189]2384            if showprogress  is None: showprogress  = True
2385            if minnrow       is None: minnrow       = 1000
[2186]2386            if outlog        is None: outlog        = False
2387            if blfile        is None: blfile        = ''
[2047]2388
[2081]2389            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2277]2390            workscan._sinusoid_baseline(mask, applyfft,
                                        fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter, getresidual,
                                        pack_progress_params(showprogress,
                                                             minnrow),
                                        outlog, blfile)
[2186]2391            workscan._add_history('sinusoid_baseline', varlist)
[2047]2392           
2393            if insitu:
2394                self._assign(workscan)
2395            else:
2396                return workscan
2397           
2398        except RuntimeError, e:
[2186]2399            raise_fitting_failure_exception(e)
[2047]2400
2401
[2186]2402    @asaplog_post_dec
[2277]2403    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2404                               addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2405                               chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
[2189]2406                               outlog=None, blfile=None):
[2047]2407        """\
[2277]2408        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2409        Spectral lines are detected first using linefinder and masked out
2410        to avoid them affecting the baseline solution.
2411
2412        Parameters:
[2189]2413            insitu:         if False a new scantable is returned.
2414                            Otherwise, the scaling is done in-situ
2415                            The default is taken from .asaprc (False)
 2416            mask:           an optional mask retrieved from scantable
2417            applyfft:       if True use some method, such as FFT, to find
2418                            strongest sinusoidal components in the wavenumber
2419                            domain to be used for baseline fitting.
2420                            default is True.
2421            fftmethod:      method to find the strong sinusoidal components.
2422                            now only 'fft' is available and it is the default.
2423            fftthresh:      the threshold to select wave numbers to be used for
2424                            fitting from the distribution of amplitudes in the
2425                            wavenumber domain.
2426                            both float and string values accepted.
2427                            given a float value, the unit is set to sigma.
2428                            for string values, allowed formats include:
[2277]2429                                'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
[2189]2430                                'topx' (= the x strongest ones, e.g. 'top5').
2431                            default is 3.0 (unit: sigma).
2432            addwn:          the additional wave numbers to be used for fitting.
 2433                            a list or an integer value is accepted to specify
 2434                            individual wave numbers. also a string value can be used in case
2435                            you need to specify wave numbers in a certain range,
2436                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2437                                  '<a'  (= 0,1,...,a-2,a-1),
2438                                  '>=a' (= a, a+1, ... up to the maximum wave
2439                                         number corresponding to the Nyquist
2440                                         frequency for the case of FFT).
2441                            default is [].
2442            rejwn:          the wave numbers NOT to be used for fitting.
2443                            can be set just as addwn but has higher priority:
2444                            wave numbers which are specified both in addwn
2445                            and rejwn will NOT be used. default is [].
2446            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
[2277]2447            clipniter:      maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2448            edge:           an optional number of channel to drop at
2449                            the edge of spectrum. If only one value is
2450                            specified, the same number will be dropped
2451                            from both sides of the spectrum. Default
2452                            is to keep all channels. Nested tuples
2453                            represent individual edge selection for
2454                            different IFs (a number of spectral channels
2455                            can be different)
2456            threshold:      the threshold used by line finder. It is
2457                            better to keep it large as only strong lines
2458                            affect the baseline solution.
 2459            chan_avg_limit: a maximum number of consecutive spectral
2460                            channels to average during the search of
2461                            weak and broad lines. The default is no
2462                            averaging (and no search for weak lines).
2463                            If such lines can affect the fitted baseline
2464                            (e.g. a high order polynomial is fitted),
2465                            increase this parameter (usually values up
2466                            to 8 are reasonable). Most users of this
2467                            method should find the default value sufficient.
2468            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
 2469                            plot the fit and the residual. In this mode each
 2470                            individual fit has to be approved, by typing 'y'
2471                            or 'n'
2472            getresidual:    if False, returns best-fit values instead of
2473                            residual. (default is True)
2474            showprogress:   show progress status for large data.
2475                            default is True.
2476            minnrow:        minimum number of input spectra to show.
2477                            default is 1000.
2478            outlog:         Output the coefficients of the best-fit
2479                            function to logger (default is False)
2480            blfile:         Name of a text file in which the best-fit
 2481                            parameter values are to be written
2482                            (default is "": no file/logger output)
[2047]2483
2484        Example:
[2186]2485            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2486       
2487        Note:
2488            The best-fit parameter values output in logger and/or blfile are now
2489            based on specunit of 'channel'.
[2047]2490        """
2491
[2186]2492        try:
2493            varlist = vars()
[2047]2494
[2186]2495            if insitu is None: insitu = rcParams['insitu']
2496            if insitu:
2497                workscan = self
[2047]2498            else:
[2186]2499                workscan = self.copy()
2500           
[2277]2501            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2502            if applyfft       is None: applyfft       = True
2503            if fftmethod      is None: fftmethod      = 'fft'
2504            if fftthresh      is None: fftthresh      = 3.0
2505            if addwn          is None: addwn          = []
2506            if rejwn          is None: rejwn          = []
2507            if clipthresh     is None: clipthresh     = 3.0
2508            if clipniter      is None: clipniter      = 0
2509            if edge           is None: edge           = (0,0)
2510            if threshold      is None: threshold      = 3
2511            if chan_avg_limit is None: chan_avg_limit = 1
2512            if plot           is None: plot           = False
2513            if getresidual    is None: getresidual    = True
[2189]2514            if showprogress   is None: showprogress   = True
2515            if minnrow        is None: minnrow        = 1000
[2186]2516            if outlog         is None: outlog         = False
2517            if blfile         is None: blfile         = ''
[2047]2518
[2277]2519            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
 2520            workscan._auto_sinusoid_baseline(mask, applyfft,
                                             fftmethod.lower(),
                                             str(fftthresh).lower(),
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual,
                                             pack_progress_params(showprogress,
                                                                  minnrow),
                                             outlog, blfile)
[2047]2521            workscan._add_history("auto_sinusoid_baseline", varlist)
2522           
2523            if insitu:
2524                self._assign(workscan)
2525            else:
2526                return workscan
2527           
2528        except RuntimeError, e:
[2186]2529            raise_fitting_failure_exception(e)
[2047]2530
2531    @asaplog_post_dec
[2277]2532    def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2533                         plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
[1846]2534        """\
[2277]2535        Return a scan which has been baselined (all rows) by a cubic spline function (piecewise cubic polynomial).
[513]2536        Parameters:
[2189]2537            insitu:       If False a new scantable is returned.
2538                          Otherwise, the scaling is done in-situ
2539                          The default is taken from .asaprc (False)
2540            mask:         An optional mask
2541            npiece:       Number of pieces. (default is 2)
2542            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
[2277]2543            clipniter:    maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2544            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2545                          plot the fit and the residual. In this mode each
2546                          individual fit has to be approved by typing 'y'
2547                          or 'n'
2548            getresidual:  if False, returns best-fit values instead of
2549                          residual. (default is True)
2550            showprogress: show progress status for large data.
2551                          default is True.
2552            minnrow:      minimum number of input spectra to show.
2553                          default is 1000.
2554            outlog:       Output the coefficients of the best-fit
2555                          function to logger (default is False)
2556            blfile:       Name of a text file in which the best-fit
2557                          parameter values are to be written
2558                          (default is "": no file/logger output)
[1846]2559
[2012]2560        Example:
[2277]2561            # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
[2012]2562            # also with 3-sigma clipping, iteration up to 4 times
2563            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2564       
2565        Note:
2566            The best-fit parameter values output to the logger and/or blfile are now
2567            based on a specunit of 'channel'.
[2012]2568        """
2569       
[2186]2570        try:
2571            varlist = vars()
2572           
2573            if insitu is None: insitu = rcParams['insitu']
2574            if insitu:
2575                workscan = self
2576            else:
2577                workscan = self.copy()
[1855]2578
[2277]2579            if mask         is None: mask         = [True for i in xrange(workscan.nchan())]
[2189]2580            if npiece       is None: npiece       = 2
2581            if clipthresh   is None: clipthresh   = 3.0
2582            if clipniter    is None: clipniter    = 0
2583            if plot         is None: plot         = False
2584            if getresidual  is None: getresidual  = True
2585            if showprogress is None: showprogress = True
2586            if minnrow      is None: minnrow      = 1000
2587            if outlog       is None: outlog       = False
2588            if blfile       is None: blfile       = ''
[1855]2589
[2012]2590            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2277]2591            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2012]2592            workscan._add_history("cspline_baseline", varlist)
2593           
2594            if insitu:
2595                self._assign(workscan)
2596            else:
2597                return workscan
2598           
2599        except RuntimeError, e:
[2186]2600            raise_fitting_failure_exception(e)
[1855]2601
[2186]2602    @asaplog_post_dec
[2277]2603    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2604                              edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2605                              showprogress=None, minnrow=None, outlog=None, blfile=None):
[2012]2606        """\
2607        Return a scan which has been baselined (all rows) by a cubic spline
2608        function (piecewise cubic polynomial).
2609        Spectral lines are detected first using linefinder and masked out
2610        to avoid them affecting the baseline solution.
2611
2612        Parameters:
[2189]2613            insitu:         if False a new scantable is returned.
2614                            Otherwise, the baselining is done in-situ
2615                            The default is taken from .asaprc (False)
2616            mask:           an optional mask retrieved from scantable
2617            npiece:         Number of pieces. (default is 2)
2618            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
[2277]2619            clipniter:      maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2620            edge:           an optional number of channels to drop at
2621                            the edge of the spectrum. If only one value is
2622                            specified, the same number will be dropped
2623                            from both sides of the spectrum. Default
2624                            is to keep all channels. Nested tuples
2625                            represent individual edge selection for
2626                            different IFs (the number of spectral channels
2627                            may differ)
2628            threshold:      the threshold used by line finder. It is
2629                            better to keep it large as only strong lines
2630                            affect the baseline solution.
2631            chan_avg_limit: a maximum number of consecutive spectral
2632                            channels to average during the search for
2633                            weak and broad lines. The default is no
2634                            averaging (and no search for weak lines).
2635                            If such lines can affect the fitted baseline
2636                            (e.g. a high order polynomial is fitted),
2637                            increase this parameter (usually values up
2638                            to 8 are reasonable). Most users of this
2639                            method should find the default value sufficient.
2640            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2641                            plot the fit and the residual. In this mode each
2642                            individual fit has to be approved by typing 'y'
2643                            or 'n'
2644            getresidual:    if False, returns best-fit values instead of
2645                            residual. (default is True)
2646            showprogress:   show progress status for large data.
2647                            default is True.
2648            minnrow:        minimum number of input spectra to show.
2649                            default is 1000.
2650            outlog:         Output the coefficients of the best-fit
2651                            function to logger (default is False)
2652            blfile:         Name of a text file in which the best-fit
2653                            parameter values are to be written
2654                            (default is "": no file/logger output)
[1846]2655
[1907]2656        Example:
[2012]2657            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2658       
2659        Note:
2660            The best-fit parameter values output to the logger and/or blfile are now
2661            based on a specunit of 'channel'.
[2012]2662        """
[1846]2663
[2186]2664        try:
2665            varlist = vars()
[2012]2666
[2186]2667            if insitu is None: insitu = rcParams['insitu']
2668            if insitu:
2669                workscan = self
[1391]2670            else:
[2186]2671                workscan = self.copy()
2672           
[2277]2673            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2674            if npiece         is None: npiece         = 2
2675            if clipthresh     is None: clipthresh     = 3.0
2676            if clipniter      is None: clipniter      = 0
2677            if edge           is None: edge           = (0, 0)
2678            if threshold      is None: threshold      = 3
2679            if chan_avg_limit is None: chan_avg_limit = 1
2680            if plot           is None: plot           = False
2681            if getresidual    is None: getresidual    = True
[2189]2682            if showprogress   is None: showprogress   = True
2683            if minnrow        is None: minnrow        = 1000
[2186]2684            if outlog         is None: outlog         = False
2685            if blfile         is None: blfile         = ''
[1819]2686
[2277]2687            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]2688            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2689                                            clipniter,
2690                                            normalise_edge_param(edge),
2691                                            threshold,
2692                                            chan_avg_limit, getresidual,
2693                                            pack_progress_params(showprogress,
2694                                                                 minnrow),
2695                                            outlog, blfile)
[2012]2696            workscan._add_history("auto_cspline_baseline", varlist)
[1907]2697           
[1856]2698            if insitu:
2699                self._assign(workscan)
2700            else:
2701                return workscan
[2012]2702           
2703        except RuntimeError, e:
[2186]2704            raise_fitting_failure_exception(e)
[513]2705
[1931]2706    @asaplog_post_dec
[2269]2707    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
2708                      getresidual=None, showprogress=None, minnrow=None,
2709                      outlog=None, blfile=None):
[1907]2710        """\
2711        Return a scan which has been baselined (all rows) by a polynomial.
2712        Parameters:
[2189]2713            insitu:       if False a new scantable is returned.
2714                          Otherwise, the baselining is done in-situ
2715                          The default is taken from .asaprc (False)
2716            mask:         an optional mask
2717            order:        the order of the polynomial (default is 0)
2718            plot:         plot the fit and the residual. In this mode each
2719                          individual fit has to be approved by typing 'y'
2720                          or 'n'
2721            getresidual:  if False, returns best-fit values instead of
2722                          residual. (default is True)
2723            showprogress: show progress status for large data.
2724                          default is True.
2725            minnrow:      minimum number of input spectra to show.
2726                          default is 1000.
2727            outlog:       Output the coefficients of the best-fit
2728                          function to logger (default is False)
2729            blfile:       Name of a text file in which the best-fit
2730                          parameter values are to be written
2731                          (default is "": no file/logger output)
[2012]2732
[1907]2733        Example:
2734            # return a scan baselined by a third order polynomial,
2735            # not using a mask
2736            bscan = scan.poly_baseline(order=3)
2737        """
[1931]2738       
[2186]2739        try:
2740            varlist = vars()
[1931]2741       
[2269]2742            if insitu is None:
2743                insitu = rcParams["insitu"]
[2186]2744            if insitu:
2745                workscan = self
2746            else:
2747                workscan = self.copy()
[1907]2748
[2277]2749            if mask         is None: mask         = [True for i in \
2750                                                       xrange(workscan.nchan())]
[2189]2751            if order        is None: order        = 0
2752            if plot         is None: plot         = False
2753            if getresidual  is None: getresidual  = True
2754            if showprogress is None: showprogress = True
2755            if minnrow      is None: minnrow      = 1000
2756            if outlog       is None: outlog       = False
2757            if blfile       is None: blfile       = ""
[1907]2758
[2012]2759            if plot:
[2269]2760                outblfile = (blfile != "") and \
2761                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2762                if outblfile:
2763                    blf = open(blfile, "a")
[2012]2764               
[1907]2765                f = fitter()
2766                f.set_function(lpoly=order)
[2186]2767               
2768                rows = xrange(workscan.nrow())
2769                #if len(rows) > 0: workscan._init_blinfo()
2770               
[1907]2771                for r in rows:
2772                    f.x = workscan._getabcissa(r)
2773                    f.y = workscan._getspectrum(r)
2774                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2775                    f.data = None
2776                    f.fit()
2777                   
2778                    f.plot(residual=True)
2779                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2780                    if accept_fit.upper() == "N":
[2012]2781                        #workscan._append_blinfo(None, None, None)
[1907]2782                        continue
[2012]2783                   
2784                    blpars = f.get_parameters()
2785                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2786                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2269]2787                    workscan._setspectrum((f.fitter.getresidual()
2788                                           if getresidual else f.fitter.getfit()), r)
[1907]2789                   
[2012]2790                    if outblfile:
2791                        rms = workscan.get_rms(f.mask, r)
[2269]2792                        dataout = \
2793                            workscan.format_blparams_row(blpars["params"],
2794                                                         blpars["fixed"],
2795                                                         rms, str(masklist),
2796                                                         r, True)
[2012]2797                        blf.write(dataout)
2798
[1907]2799                f._p.unmap()
2800                f._p = None
[2012]2801
2802                if outblfile: blf.close()
[1907]2803            else:
[2269]2804                workscan._poly_baseline(mask, order, getresidual,
2805                                        pack_progress_params(showprogress,
2806                                                             minnrow),
2807                                        outlog, blfile)
[1907]2808           
2809            workscan._add_history("poly_baseline", varlist)
2810           
2811            if insitu:
2812                self._assign(workscan)
2813            else:
2814                return workscan
2815           
[1919]2816        except RuntimeError, e:
[2186]2817            raise_fitting_failure_exception(e)
[1907]2818
[2186]2819    @asaplog_post_dec
[2269]2820    def auto_poly_baseline(self, mask=None, order=None, edge=None,
2821                           threshold=None, chan_avg_limit=None,
2822                           plot=None, insitu=None,
2823                           getresidual=None, showprogress=None,
2824                           minnrow=None, outlog=None, blfile=None):
[1846]2825        """\
[1931]2826        Return a scan which has been baselined (all rows) by a polynomial.
[880]2827        Spectral lines are detected first using linefinder and masked out
2828        to avoid them affecting the baseline solution.
2829
2830        Parameters:
[2189]2831            insitu:         if False a new scantable is returned.
2832                            Otherwise, the baselining is done in-situ
2833                            The default is taken from .asaprc (False)
2834            mask:           an optional mask retrieved from scantable
2835            order:          the order of the polynomial (default is 0)
2836            edge:           an optional number of channels to drop at
2837                            the edge of the spectrum. If only one value is
2838                            specified, the same number will be dropped
2839                            from both sides of the spectrum. Default
2840                            is to keep all channels. Nested tuples
2841                            represent individual edge selection for
2842                            different IFs (the number of spectral channels
2843                            may differ)
2844            threshold:      the threshold used by line finder. It is
2845                            better to keep it large as only strong lines
2846                            affect the baseline solution.
2847            chan_avg_limit: a maximum number of consecutive spectral
2848                            channels to average during the search for
2849                            weak and broad lines. The default is no
2850                            averaging (and no search for weak lines).
2851                            If such lines can affect the fitted baseline
2852                            (e.g. a high order polynomial is fitted),
2853                            increase this parameter (usually values up
2854                            to 8 are reasonable). Most users of this
2855                            method should find the default value sufficient.
2856            plot:           plot the fit and the residual. In this mode each
2857                            individual fit has to be approved by typing 'y'
2858                            or 'n'
2859            getresidual:    if False, returns best-fit values instead of
2860                            residual. (default is True)
2861            showprogress:   show progress status for large data.
2862                            default is True.
2863            minnrow:        minimum number of input spectra to show.
2864                            default is 1000.
2865            outlog:         Output the coefficients of the best-fit
2866                            function to logger (default is False)
2867            blfile:         Name of a text file in which the best-fit
2868                            parameter values are to be written
2869                            (default is "": no file/logger output)
[1846]2870
[2012]2871        Example:
2872            bscan = scan.auto_poly_baseline(order=7, insitu=False)
2873        """
[880]2874
[2186]2875        try:
2876            varlist = vars()
[1846]2877
[2269]2878            if insitu is None:
2879                insitu = rcParams['insitu']
[2186]2880            if insitu:
2881                workscan = self
2882            else:
2883                workscan = self.copy()
[1846]2884
[2277]2885            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2886            if order          is None: order          = 0
2887            if edge           is None: edge           = (0, 0)
2888            if threshold      is None: threshold      = 3
2889            if chan_avg_limit is None: chan_avg_limit = 1
2890            if plot           is None: plot           = False
2891            if getresidual    is None: getresidual    = True
[2189]2892            if showprogress   is None: showprogress   = True
2893            if minnrow        is None: minnrow        = 1000
[2186]2894            if outlog         is None: outlog         = False
2895            if blfile         is None: blfile         = ''
[1846]2896
[2186]2897            edge = normalise_edge_param(edge)
[880]2898
[2012]2899            if plot:
[2269]2900                outblfile = (blfile != "") and \
2901                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
[2012]2902                if outblfile: blf = open(blfile, "a")
2903               
[2186]2904                from asap.asaplinefind import linefinder
[2012]2905                fl = linefinder()
[2269]2906                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
[2012]2907                fl.set_scan(workscan)
[2186]2908               
[2012]2909                f = fitter()
2910                f.set_function(lpoly=order)
[880]2911
[2186]2912                rows = xrange(workscan.nrow())
2913                #if len(rows) > 0: workscan._init_blinfo()
2914               
[2012]2915                for r in rows:
[2186]2916                    idx = 2*workscan.getif(r)
[2269]2917                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
2918                                  edge[idx:idx+2])  # (CAS-1434)
[907]2919
[2012]2920                    f.x = workscan._getabcissa(r)
2921                    f.y = workscan._getspectrum(r)
2922                    f.mask = fl.get_mask()
2923                    f.data = None
2924                    f.fit()
2925
2926                    f.plot(residual=True)
2927                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2928                    if accept_fit.upper() == "N":
2929                        #workscan._append_blinfo(None, None, None)
2930                        continue
2931
2932                    blpars = f.get_parameters()
2933                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2934                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2269]2935                    workscan._setspectrum((f.fitter.getresidual() \
2936                                               if getresidual else f.fitter.getfit()), r)
[2012]2937
2938                    if outblfile:
2939                        rms = workscan.get_rms(f.mask, r)
[2269]2940                        dataout = \
2941                            workscan.format_blparams_row(blpars["params"],
2942                                                         blpars["fixed"],
2943                                                         rms, str(masklist),
2944                                                         r, True)
[2012]2945                        blf.write(dataout)
2946                   
2947                f._p.unmap()
2948                f._p = None
2949
2950                if outblfile: blf.close()
2951            else:
[2269]2952                workscan._auto_poly_baseline(mask, order, edge, threshold,
2953                                             chan_avg_limit, getresidual,
2954                                             pack_progress_params(showprogress,
2955                                                                  minnrow),
2956                                             outlog, blfile)
[2012]2957
2958            workscan._add_history("auto_poly_baseline", varlist)
2959           
2960            if insitu:
2961                self._assign(workscan)
2962            else:
2963                return workscan
2964           
2965        except RuntimeError, e:
[2186]2966            raise_fitting_failure_exception(e)
[2012]2967
2968    def _init_blinfo(self):
2969        """\
2970        Initialise the following three auxiliary members:
2971           blpars : parameters of the best-fit baseline,
2972           masklists : mask data (edge positions of masked channels) and
2973           actualmask : mask data (in boolean list),
2974        to keep for use later (including output to logger/text files).
2975        Used by poly_baseline() and auto_poly_baseline() in case of
2976        'plot=True'.
2977        """
2978        self.blpars = []
2979        self.masklists = []
2980        self.actualmask = []
2981        return
[880]2982
[2012]2983    def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2984        """\
2985        Append baseline-fitting related info to blpars, masklist and
2986        actualmask.
2987        """
2988        self.blpars.append(data_blpars)
2989        self.masklists.append(data_masklists)
2990        self.actualmask.append(data_actualmask)
2991        return
2992       
[1862]2993    @asaplog_post_dec
[914]2994    def rotate_linpolphase(self, angle):
[1846]2995        """\
[914]2996        Rotate the phase of the complex polarization O=Q+iU correlation.
2997        This is always done in situ in the raw data.  So if you call this
2998        function more than once then each call rotates the phase further.
[1846]2999
[914]3000        Parameters:
[1846]3001
[914]3002            angle:   The angle (degrees) to rotate (add) by.
[1846]3003
3004        Example::
3005
[914]3006            scan.rotate_linpolphase(2.3)
[1846]3007
[914]3008        """
3009        varlist = vars()
[936]3010        self._math._rotate_linpolphase(self, angle)
[914]3011        self._add_history("rotate_linpolphase", varlist)
3012        return
[710]3013
[1862]3014    @asaplog_post_dec
[914]3015    def rotate_xyphase(self, angle):
[1846]3016        """\
[914]3017        Rotate the phase of the XY correlation.  This is always done in situ
3018        in the data.  So if you call this function more than once
3019        then each call rotates the phase further.
[1846]3020
[914]3021        Parameters:
[1846]3022
[914]3023            angle:   The angle (degrees) to rotate (add) by.
[1846]3024
3025        Example::
3026
[914]3027            scan.rotate_xyphase(2.3)
[1846]3028
[914]3029        """
3030        varlist = vars()
[936]3031        self._math._rotate_xyphase(self, angle)
[914]3032        self._add_history("rotate_xyphase", varlist)
3033        return
3034
[1862]3035    @asaplog_post_dec
[914]3036    def swap_linears(self):
[1846]3037        """\
[1573]3038        Swap the linear polarisations XX and YY, or more precisely the first
[1348]3039        two polarisations, as this also works for circulars.
[914]3040        """
3041        varlist = vars()
[936]3042        self._math._swap_linears(self)
[914]3043        self._add_history("swap_linears", varlist)
3044        return
3045
[1862]3046    @asaplog_post_dec
[914]3047    def invert_phase(self):
[1846]3048        """\
[914]3049        Invert the phase of the complex polarisation
3050        """
3051        varlist = vars()
[936]3052        self._math._invert_phase(self)
[914]3053        self._add_history("invert_phase", varlist)
3054        return
3055
[1862]3056    @asaplog_post_dec
[876]3057    def add(self, offset, insitu=None):
[1846]3058        """\
[513]3059        Return a scan where all spectra have the offset added
[1846]3060
[513]3061        Parameters:
[1846]3062
[513]3063            offset:      the offset
[1855]3064
[513]3065            insitu:      if False a new scantable is returned.
3066                         Otherwise, the scaling is done in-situ
3067                         The default is taken from .asaprc (False)
[1846]3068
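        Example (an illustrative sketch; ``scan`` is assumed to be a
        scantable that has already been loaded)::

            # add a constant offset of 1.5 to every spectrum and return a
            # new scantable, leaving 'scan' unchanged
            sscan = scan.add(1.5, insitu=False)
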
[513]3069        """
3070        if insitu is None: insitu = rcParams['insitu']
[876]3071        self._math._setinsitu(insitu)
[513]3072        varlist = vars()
[876]3073        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3074        s._add_history("add", varlist)
[876]3075        if insitu:
3076            self._assign(s)
3077        else:
[513]3078            return s
3079
[1862]3080    @asaplog_post_dec
[1308]3081    def scale(self, factor, tsys=True, insitu=None):
[1846]3082        """\
3083
[1938]3084        Return a scan where all spectra are scaled by the given 'factor'
[1846]3085
[513]3086        Parameters:
[1846]3087
[1819]3088            factor:      the scaling factor (float or 1D float list)
[1855]3089
[513]3090            insitu:      if False a new scantable is returned.
3091                         Otherwise, the scaling is done in-situ
3092                         The default is taken from .asaprc (False)
[1855]3093
[513]3094            tsys:        if True (default) then apply the operation to Tsys
3095                         as well as the data
[1846]3096
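        Example (an illustrative sketch; ``scan`` is assumed to be a
        scantable that has already been loaded)::

            # multiply every spectrum (and its Tsys) by 1.3 and return
            # a new scantable
            sscan = scan.scale(1.3, tsys=True, insitu=False)
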
[513]3097        """
3098        if insitu is None: insitu = rcParams['insitu']
[876]3099        self._math._setinsitu(insitu)
[513]3100        varlist = vars()
[1819]3101        s = None
3102        import numpy
3103        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3104            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3105                from asapmath import _array2dOp
3106                s = _array2dOp( self.copy(), factor, "MUL", tsys )
3107            else:
3108                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3109        else:
3110            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]3111        s._add_history("scale", varlist)
[876]3112        if insitu:
3113            self._assign(s)
3114        else:
[513]3115            return s
3116
[1504]3117    def set_sourcetype(self, match, matchtype="pattern",
3118                       sourcetype="reference"):
[1846]3119        """\
[1502]3120        Set the type of the source to be a source or reference scan
[1846]3121        using the provided pattern.
3122
[1502]3123        Parameters:
[1846]3124
[1504]3125            match:          a Unix style pattern, regular expression or selector
[1855]3126
[1504]3127            matchtype:      'pattern' (default) UNIX style pattern or
3128                            'regex' regular expression
[1855]3129
[1502]3130            sourcetype:     the type of the source to use (source/reference)
[1846]3131
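        Example (an illustrative sketch; the source-name pattern used here
        is hypothetical)::

            # mark every scan whose source name ends in '_R' as a reference
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
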
[1502]3132        """
3133        varlist = vars()
3134        basesel = self.get_selection()
3135        stype = -1
3136        if sourcetype.lower().startswith("r"):
3137            stype = 1
3138        elif sourcetype.lower().startswith("s"):
3139            stype = 0
[1504]3140        else:
[1502]3141            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
[1504]3142        if matchtype.lower().startswith("p"):
3143            matchtype = "pattern"
3144        elif matchtype.lower().startswith("r"):
3145            matchtype = "regex"
3146        else:
3147            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3148        sel = selector()
3149        if isinstance(match, selector):
3150            sel = match
3151        else:
[1504]3152            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3153        self.set_selection(basesel+sel)
3154        self._setsourcetype(stype)
3155        self.set_selection(basesel)
[1573]3156        self._add_history("set_sourcetype", varlist)
[1502]3157
[1862]3158    @asaplog_post_dec
[1857]3159    @preserve_selection
[1819]3160    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3161        """\
[670]3162        This function allows quotients to be built automatically.
[1819]3163        It assumes the observation has the same number of
[670]3164        "ons" and "offs".
[1846]3165
[670]3166        Parameters:
[1846]3167
[710]3168            preserve:       you can preserve (default) the continuum or
3169                            remove it.  The equations used are
[1857]3170
[670]3171                            preserve: Output = Toff * (on/off) - Toff
[1857]3172
[1070]3173                            remove:   Output = Toff * (on/off) - Ton
[1855]3174
[1573]3175            mode:           the on/off detection mode
[1348]3176                            'paired' (default)
3177                            identifies 'off' scans by the
3178                            trailing '_R' (Mopra/Parkes) or
3179                            '_e'/'_w' (Tid) and matches
3180                            on/off pairs from the observing pattern
[1502]3181                            'time'
3182                            finds the closest off in time
[1348]3183
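        Example (an illustrative sketch; assumes ``scan`` holds matching
        on/off observations)::

            # form quotients from on/off pairs, preserving the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
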
[1857]3184        .. todo:: verify argument is not implemented
3185
[670]3186        """
[1857]3187        varlist = vars()
[1348]3188        modes = ["time", "paired"]
[670]3189        if not mode in modes:
[876]3190            msg = "please provide valid mode. Valid modes are %s" % (modes)
3191            raise ValueError(msg)
[1348]3192        s = None
3193        if mode.lower() == "paired":
[1857]3194            sel = self.get_selection()
[1875]3195            sel.set_query("SRCTYPE==psoff")
[1356]3196            self.set_selection(sel)
[1348]3197            offs = self.copy()
[1875]3198            sel.set_query("SRCTYPE==pson")
[1356]3199            self.set_selection(sel)
[1348]3200            ons = self.copy()
3201            s = scantable(self._math._quotient(ons, offs, preserve))
3202        elif mode.lower() == "time":
3203            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3204        s._add_history("auto_quotient", varlist)
[876]3205        return s
[710]3206
[1862]3207    @asaplog_post_dec
[1145]3208    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3209        """\
[1143]3210        Form a quotient using "off" beams when observing in "MX" mode.
[1846]3211
[1143]3212        Parameters:
[1846]3213
[1145]3214            mask:           an optional mask to be used when weight == 'stddev'
[1855]3215
[1143]3216            weight:         How to average the off beams.  Default is 'median'.
[1855]3217
[1145]3218            preserve:       you can preserve (default) the continuum or
[1855]3219                            remove it.  The equations used are:
[1846]3220
[1855]3221                                preserve: Output = Toff * (on/off) - Toff
3222
3223                                remove:   Output = Toff * (on/off) - Ton
3224
[1217]3225        """
[1593]3226        mask = mask or ()
[1141]3227        varlist = vars()
3228        on = scantable(self._math._mx_extract(self, 'on'))
[1143]3229        preoff = scantable(self._math._mx_extract(self, 'off'))
3230        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3231        from asapmath  import quotient
[1145]3232        q = quotient(on, off, preserve)
[1143]3233        q._add_history("mx_quotient", varlist)
[1217]3234        return q
[513]3235
[1862]3236    @asaplog_post_dec
[718]3237    def freq_switch(self, insitu=None):
[1846]3238        """\
[718]3239        Apply frequency switching to the data.
[1846]3240
[718]3241        Parameters:
[1846]3242
[718]3243            insitu:      if False a new scantable is returned.
3244                         Otherwise, the switching is done in-situ
3245                         The default is taken from .asaprc (False)
[1846]3246
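        Example (an illustrative sketch; assumes ``scan`` contains
        frequency-switched data)::

            # apply frequency switching and return a new scantable
            fscan = scan.freq_switch(insitu=False)
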
[718]3247        """
3248        if insitu is None: insitu = rcParams['insitu']
[876]3249        self._math._setinsitu(insitu)
[718]3250        varlist = vars()
[876]3251        s = scantable(self._math._freqswitch(self))
[1118]3252        s._add_history("freq_switch", varlist)
[1856]3253        if insitu:
3254            self._assign(s)
3255        else:
3256            return s
[718]3257
[1862]3258    @asaplog_post_dec
[780]3259    def recalc_azel(self):
[1846]3260        """Recalculate the azimuth and elevation for each position."""
[780]3261        varlist = vars()
[876]3262        self._recalcazel()
[780]3263        self._add_history("recalc_azel", varlist)
3264        return
3265
[1862]3266    @asaplog_post_dec
[513]3267    def __add__(self, other):
3268        varlist = vars()
3269        s = None
3270        if isinstance(other, scantable):
[1573]3271            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3272        elif isinstance(other, float):
[876]3273            s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3274        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3275            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3276                from asapmath import _array2dOp
3277                s = _array2dOp( self.copy(), other, "ADD", False )
3278            else:
3279                s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3280        else:
[718]3281            raise TypeError("Other input is not a scantable or float value")
[513]3282        s._add_history("operator +", varlist)
3283        return s
3284
[1862]3285    @asaplog_post_dec
[513]3286    def __sub__(self, other):
3287        """
3288        implicit on all axes and on Tsys
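
        Example (an illustrative sketch; ``scan`` is a hypothetical
        scantable)::

            # subtract a constant offset of 0.3 from every spectrum
            diff = scan - 0.3
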
3289        """
3290        varlist = vars()
3291        s = None
3292        if isinstance(other, scantable):
[1588]3293            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3294        elif isinstance(other, float):
[876]3295            s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3296        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3297            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3298                from asapmath import _array2dOp
3299                s = _array2dOp( self.copy(), other, "SUB", False )
3300            else:
[2277]3301                s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3302        else:
[718]3303            raise TypeError("Other input is not a scantable or float value")
[513]3304        s._add_history("operator -", varlist)
3305        return s
[710]3306
[1862]3307    @asaplog_post_dec
[513]3308    def __mul__(self, other):
3309        """
3310        implicit on all axes and on Tsys
3311        """
3312        varlist = vars()
3313        s = None
3314        if isinstance(other, scantable):
[1588]3315            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3316        elif isinstance(other, float):
[876]3317            s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3318        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3319            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3320                from asapmath import _array2dOp
3321                s = _array2dOp( self.copy(), other, "MUL", False )
3322            else:
[2277]3323                s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3324        else:
[718]3325            raise TypeError("Other input is not a scantable or float value")
[513]3326        s._add_history("operator *", varlist)
3327        return s
3328
[710]3329
[1862]3330    @asaplog_post_dec
[513]3331    def __div__(self, other):
3332        """
3333        implicit on all axes and on Tsys
3334        """
3335        varlist = vars()
3336        s = None
3337        if isinstance(other, scantable):
[1589]3338            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3339        elif isinstance(other, float):
3340            if other == 0.0:
[718]3341                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3342            s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3343        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3344            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3345                from asapmath import _array2dOp
3346                s = _array2dOp( self.copy(), other, "DIV", False )
3347            else:
[2277]3348                s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3349        else:
[718]3350            raise TypeError("Other input is not a scantable or float value")
[513]3351        s._add_history("operator /", varlist)
3352        return s
3353
[1862]3354    @asaplog_post_dec
[530]3355    def get_fit(self, row=0):
[1846]3356        """\
[530]3357        Print or return the stored fits for a row in the scantable
[1846]3358
[530]3359        Parameters:
[1846]3360
[530]3361            row:    the row which the fit has been applied to.
[1846]3362
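        Example (an illustrative sketch; assumes a fit has already been
        stored for row 0 of ``scan``)::

            # retrieve the stored fit of the first row as a dictionary
            fitinfo = scan.get_fit(row=0)
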
[530]3363        """
3364        if row > self.nrow():
3365            return
[976]3366        from asap.asapfit import asapfit
[530]3367        fit = asapfit(self._getfit(row))
[1859]3368        asaplog.push( '%s' %(fit) )
3369        return fit.as_dict()
[530]3370
[1483]3371    def flag_nans(self):
[1846]3372        """\
[1483]3373        Utility function to flag NaN values in the scantable.
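
        Example (an illustrative sketch; operates in situ on ``scan``)::

            # flag all NaN-valued channels in every row of the scantable
            scan.flag_nans()
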
3374        """
3375        import numpy
3376        basesel = self.get_selection()
3377        for i in range(self.nrow()):
[1589]3378            sel = self.get_row_selector(i)
3379            self.set_selection(basesel+sel)
[1483]3380            nans = numpy.isnan(self._getspectrum(0))
3381            if numpy.any(nans):
3382                bnans = [ bool(v) for v in nans]
3383                self.flag(bnans)
3384        self.set_selection(basesel)
3385
[1588]3386    def get_row_selector(self, rowno):
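        """Return a selector which selects only the given row number."""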
[1992]3387        #return selector(beams=self.getbeam(rowno),
3388        #                ifs=self.getif(rowno),
3389        #                pols=self.getpol(rowno),
3390        #                scans=self.getscan(rowno),
3391        #                cycles=self.getcycle(rowno))
3392        return selector(rows=[rowno])
[1573]3393
[484]3394    def _add_history(self, funcname, parameters):
[1435]3395        if not rcParams['scantable.history']:
3396            return
[484]3397        # create date
3398        sep = "##"
3399        from datetime import datetime
3400        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3401        hist = dstr+sep
3402        hist += funcname+sep#cdate+sep
3403        if parameters.has_key('self'): del parameters['self']
[1118]3404        for k, v in parameters.iteritems():
[484]3405            if type(v) is dict:
[1118]3406                for k2, v2 in v.iteritems():
[484]3407                    hist += k2
3408                    hist += "="
[1118]3409                    if isinstance(v2, scantable):
[484]3410                        hist += 'scantable'
3411                    elif k2 == 'mask':
[1118]3412                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]3413                            hist += str(self._zip_mask(v2))
3414                        else:
3415                            hist += str(v2)
[484]3416                    else:
[513]3417                        hist += str(v2)
[484]3418            else:
3419                hist += k
3420                hist += "="
[1118]3421                if isinstance(v, scantable):
[484]3422                    hist += 'scantable'
3423                elif k == 'mask':
[1118]3424                    if isinstance(v, list) or isinstance(v, tuple):
[513]3425                        hist += str(self._zip_mask(v))
3426                    else:
3427                        hist += str(v)
[484]3428                else:
3429                    hist += str(v)
3430            hist += sep
3431        hist = hist[:-2] # remove trailing '##'
3432        self._addhistory(hist)
3433
[710]3434
[484]3435    def _zip_mask(self, mask):
3436        mask = list(mask)
3437        i = 0
3438        segments = []
3439        while mask[i:].count(1):
3440            i += mask[i:].index(1)
3441            if mask[i:].count(0):
3442                j = i + mask[i:].index(0)
3443            else:
[710]3444                j = len(mask)
[1118]3445            segments.append([i, j])
[710]3446            i = j
[484]3447        return segments
[714]3448
[626]3449    def _get_ordinate_label(self):
3450        fu = "("+self.get_fluxunit()+")"
3451        import re
3452        lbl = "Intensity"
[1118]3453        if re.match(".K.", fu):
[626]3454            lbl = "Brightness Temperature "+ fu
[1118]3455        elif re.match(".Jy.", fu):
[626]3456            lbl = "Flux density "+ fu
3457        return lbl
[710]3458
[876]3459    def _check_ifs(self):
[1986]3460        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3461        nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3462        nchans = filter(lambda t: t > 0, nchans)
[876]3463        return (sum(nchans)/len(nchans) == nchans[0])
[976]3464
[1862]3465    @asaplog_post_dec
[1916]3466    #def _fill(self, names, unit, average, getpt, antenna):
3467    def _fill(self, names, unit, average, opts={}):
[976]3468        first = True
3469        fullnames = []
3470        for name in names:
3471            name = os.path.expandvars(name)
3472            name = os.path.expanduser(name)
3473            if not os.path.exists(name):
3474                msg = "File '%s' does not exists" % (name)
3475                raise IOError(msg)
3476            fullnames.append(name)
3477        if average:
3478            asaplog.push('Auto averaging integrations')
[1079]3479        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3480        for name in fullnames:
[1073]3481            tbl = Scantable(stype)
[2004]3482            if is_ms( name ):
3483                r = msfiller( tbl )
3484            else:
3485                r = filler( tbl )
3486                rx = rcParams['scantable.reference']
3487                r.setreferenceexpr(rx)
3488            #r = filler(tbl)
3489            #rx = rcParams['scantable.reference']
3490            #r.setreferenceexpr(rx)
[976]3491            msg = "Importing %s..." % (name)
[1118]3492            asaplog.push(msg, False)
[1916]3493            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3494            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3495            r.fill()
[976]3496            if average:
[1118]3497                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3498            if not first:
3499                tbl = self._math._merge([self, tbl])
3500            Scantable.__init__(self, tbl)
[1843]3501            r.close()
[1118]3502            del r, tbl
[976]3503            first = False
[1861]3504            #flush log
3505        asaplog.post()
[976]3506        if unit is not None:
3507            self.set_fluxunit(unit)
[1824]3508        if not is_casapy():
3509            self.set_freqframe(rcParams['scantable.freqframe'])
[976]3510
[2012]3511
[1402]3512    def __getitem__(self, key):
3513        if key < 0:
3514            key += self.nrow()
3515        if key >= self.nrow():
3516            raise IndexError("Row index out of range.")
3517        return self._getspectrum(key)
3518
3519    def __setitem__(self, key, value):
3520        if key < 0:
3521            key += self.nrow()
3522        if key >= self.nrow():
3523            raise IndexError("Row index out of range.")
3524        if not hasattr(value, "__len__") or \
3525                len(value) > self.nchan(self.getif(key)):
3526            raise ValueError("Spectrum length doesn't match.")
3527        return self._setspectrum(value, key)
3528
3529    def __len__(self):
3530        return self.nrow()
3531
3532    def __iter__(self):
3533        for i in range(len(self)):
3534            yield self[i]