source: trunk/python/scantable.py @ 2186

Last change on this file since 2186 was 2186, checked in by WataruKawasaki, 13 years ago

New Development: Yes

JIRA Issue: Yes CAS-3149

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: scantable.*sinusoid_baseline() params

Test Programs:

Put in Release Notes: Yes

Module(s):

Description: (1) Implemented an automated sinusoidal fitting functionality

(2) FFT available with scantable.fft()
(3) Fixed a bug in parsing the 'edge' param used by linefinder.
(4) Added a function to show progress status for row-based iterations.


[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[2004]12from asap._asap import filler, msfiller
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
22def preserve_selection(func):
[1691]23    @wraps_dec(func)
[1689]24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
[1857]26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
[1689]30        return val
31    return wrap
32
[1846]33def is_scantable(filename):
34    """Is the given file a scantable?
[1689]35
[1846]36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
[1883]41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
[1697]57
[1883]58def is_ms(filename):
59    """Is the given file a MeasurementSet?
[1697]60
[1883]61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
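
# A small usage sketch (not part of the original module; the path argument is
# purely illustrative). Both helpers above only read 'table.info', so they are
# cheap to call before deciding how a file should be filled into a scantable.
def _classify_input(filename):
    """Return 'scantable', 'ms' or 'other' for the given file/directory."""
    if is_scantable(filename):
        return 'scantable'
    elif is_ms(filename):
        return 'ms'
    return 'other'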
[2186]78
79def normalise_edge_param(edge):
80    """\
81    Convert a given edge value to a one-dimensional array that can be
82    given to baseline-fitting/subtraction functions.
83    The length of the output will be an even number because it contains
84    values for both sides of the spectra for each IF. When the length is 2,
85    the values are applied to all IFs. If the length is larger than 2, it
86    will be 2*ifnos().
87    Accepted formats of edge include:
88            * an integer - will be used for both sides of spectra of all IFs.
89                  e.g. 10 is converted to [10,10]
90            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
91            * a list/tuple containing an integer - same as the above case.
92                  e.g. [10] is converted to [10,10]
93            * a list/tuple containing two integers - will be used for all IFs.
94                  e.g. [5,10] is output as it is. no need to convert.
95            * a list/tuple of lists/tuples containing TWO integers -
96                  each element of edge will be used for each IF.
97                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
98                 
99                  If an element contains the same integer values, the input 'edge'
100                  parameter can be given in a simpler shape in the following cases:
101                      ** when len(edge)!=2
102                          any elements containing the same values can be replaced
103                          with single integers.
104                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
105                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
106                      ** when len(edge)=2
107                          care is needed for this case: ONLY ONE of the
108                          elements can be a single integer,
109                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
110                               or [[5,5],10], but can NOT be simplified to [5,10].
111                               when [5,10] is given, it is interpreted as
112                               [[5,10],[5,10],[5,10],...] instead, as shown before.
113    """
114    from asap import _is_sequence_or_number as _is_valid
115    if isinstance(edge, list) or isinstance(edge, tuple):
116        for edgepar in edge:
117            if not _is_valid(edgepar, int):
118                raise ValueError, "Each element of the 'edge' tuple has \
119                                   to be a pair of integers or an integer."
120            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
121                if len(edgepar) != 2:
122                    raise ValueError, "Each element of the 'edge' tuple has \
123                                       to be a pair of integers or an integer."
124    else:
125        if not _is_valid(edge, int):
126            raise ValueError, "Parameter 'edge' has to be an integer or a \
127                               pair of integers specified as a tuple. \
128                               Nested tuples are allowed \
129                               to make individual selection for different IFs."
130       
131
132    if isinstance(edge, int):
133        edge = [ edge, edge ]                 # e.g. 3   => [3,3]
134    elif isinstance(edge, list) or isinstance(edge, tuple):
135        if len(edge) == 0:
136            edge = [0, 0]                     # e.g. []  => [0,0]
137        elif len(edge) == 1:
138            if isinstance(edge[0], int):
139                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]
140
141    commonedge = True
142    if len(edge) > 2: commonedge = False
143    else:
144        for edgepar in edge:
145            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
146                commonedge = False
147                break
148
149    if commonedge:
150        if len(edge) > 1:
151            norm_edge = edge
152        else:
153            norm_edge = edge + edge
154    else:
155        norm_edge = []
156        for edgepar in edge:
157            if isinstance(edgepar, int):
158                norm_edge += [edgepar, edgepar]
159            else:
160                norm_edge += edgepar
161
162    return norm_edge
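
# Illustrative check of the conversions documented above (not part of the
# original module); each assert mirrors one of the accepted 'edge' formats.
def _normalise_edge_examples():
    assert normalise_edge_param(10) == [10, 10]
    assert normalise_edge_param([]) == [0, 0]
    assert normalise_edge_param([10]) == [10, 10]
    assert normalise_edge_param([5, 10]) == [5, 10]
    assert normalise_edge_param([[5, 10], [15, 20]]) == [5, 10, 15, 20]
    assert normalise_edge_param([1, 2, 3]) == [1, 1, 2, 2, 3, 3]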
163
164def raise_fitting_failure_exception(e):
165    msg = "The fit failed, possibly because it didn't converge."
166    if rcParams["verbose"]:
167        asaplog.push(str(e))
168        asaplog.push(str(msg))
169    else:
170        raise RuntimeError(str(e)+'\n'+msg)
[1883]171   
[2186]172
[876]173class scantable(Scantable):
[1846]174    """\
175        The ASAP container for scans (single-dish data).
[102]176    """
[1819]177
[1862]178    @asaplog_post_dec
[1916]179    #def __init__(self, filename, average=None, unit=None, getpt=None,
180    #             antenna=None, parallactify=None):
181    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
[1846]182        """\
[102]183        Create a scantable from a saved one or make a reference
[1846]184
[102]185        Parameters:
[1846]186
187            filename:     the name of an asap table on disk
188                          or
189                          the name of a rpfits/sdfits/ms file
190                          (integrations within scans are auto averaged
191                          and the whole file is read) or
192                          [advanced] a reference to an existing scantable
193
194            average:      average all integrations within a scan on read.
195                          The default (True) is taken from .asaprc.
196
[484]197            unit:         brightness unit; must be consistent with K or Jy.
[1846]198                          Over-rides the default selected by the filler
199                          (input rpfits/sdfits/ms) or replaces the value
200                          in existing scantables
201
202            getpt:        for MeasurementSet input data only:
203                          If True, all pointing data are filled.
204                          The deafult is False, which makes time to load
205                          The default is False, which makes loading the
206                          MS data faster in some cases.
[1920]207            antenna:      for MeasurementSet input data only:
208                          Antenna selection. integer (id) or string (name or id).
[1846]209
210            parallactify: Indicate that the data had been parallactified. Default
211                          is taken from rc file.
212
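        Example (a minimal sketch; the file names and the antenna id are
        illustrative)::

            s = scantable('mydata.asap', average=False)
            # fill from a MeasurementSet, selecting one antenna and
            # skipping the pointing table
            s = scantable('mydata.ms', average=True, getpt=False, antenna=0)
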
[710]213        """
[976]214        if average is None:
[710]215            average = rcParams['scantable.autoaverage']
[1916]216        #if getpt is None:
217        #    getpt = True
218        #if antenna is not None:
219        #    asaplog.push("Antenna selection currently unsupported."
220        #                 "Using ''")
221        #    asaplog.post('WARN')
222        #if antenna is None:
223        #    antenna = ''
224        #elif type(antenna) == int:
225        #    antenna = '%s' % antenna
226        #elif type(antenna) == list:
227        #    tmpstr = ''
228        #    for i in range( len(antenna) ):
229        #        if type(antenna[i]) == int:
230        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
231        #        elif type(antenna[i]) == str:
232        #            tmpstr=tmpstr+antenna[i]+','
233        #        else:
234        #            raise TypeError('Bad antenna selection.')
235        #    antenna = tmpstr.rstrip(',')
[1593]236        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]237        varlist = vars()
[876]238        from asap._asap import stmath
[1819]239        self._math = stmath( rcParams['insitu'] )
[876]240        if isinstance(filename, Scantable):
241            Scantable.__init__(self, filename)
[181]242        else:
[1697]243            if isinstance(filename, str):
[976]244                filename = os.path.expandvars(filename)
245                filename = os.path.expanduser(filename)
246                if not os.path.exists(filename):
247                    s = "File '%s' not found." % (filename)
248                    raise IOError(s)
[1697]249                if is_scantable(filename):
250                    ondisk = rcParams['scantable.storage'] == 'disk'
251                    Scantable.__init__(self, filename, ondisk)
252                    if unit is not None:
253                        self.set_fluxunit(unit)
[2008]254                    if average:
255                        self._assign( self.average_time( scanav=True ) )
[1819]256                    # do not reset to the default freqframe
257                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]258                #elif os.path.isdir(filename) \
259                #         and not os.path.exists(filename+'/table.f1'):
260                elif is_ms(filename):
[1916]261                    # Measurement Set
262                    opts={'ms': {}}
263                    mskeys=['getpt','antenna']
264                    for key in mskeys:
265                        if key in args.keys():
266                            opts['ms'][key] = args[key]
267                    #self._fill([filename], unit, average, getpt, antenna)
268                    self._fill([filename], unit, average, opts)
[1893]269                elif os.path.isfile(filename):
[1916]270                    #self._fill([filename], unit, average, getpt, antenna)
271                    self._fill([filename], unit, average)
[1883]272                else:
[1819]273                    msg = "The given file '%s' is not a valid " \
274                          "asap table." % (filename)
[1859]275                    raise IOError(msg)
[1118]276            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]277                  and isinstance(filename[-1], str):
[1916]278                #self._fill(filename, unit, average, getpt, antenna)
279                self._fill(filename, unit, average)
[1586]280        self.parallactify(parallactify)
[1259]281        self._add_history("scantable", varlist)
[102]282
[1862]283    @asaplog_post_dec
[876]284    def save(self, name=None, format=None, overwrite=False):
[1846]285        """\
[1280]286        Store the scantable on disk. This can be an asap (aips++) Table,
287        SDFITS or MS2 format.
[1846]288
[116]289        Parameters:
[1846]290
[1093]291            name:        the name of the outputfile. For format "ASCII"
292                         this is the root file name (data in 'name'.txt
[497]293                         and header in 'name'_header.txt)
[1855]294
[116]295            format:      an optional file format. Default is ASAP.
[1855]296                         Allowed are:
297
298                            * 'ASAP' (save as ASAP [aips++] Table),
299                            * 'SDFITS' (save as SDFITS file)
300                            * 'ASCII' (saves as ascii text file)
301                            * 'MS2' (saves as a casacore MeasurementSet V2)
302                            * 'FITS' (save as image FITS - not readable by CLASS)
303                            * 'CLASS' (save as FITS readable by CLASS)
304
[411]305            overwrite:   If the file should be overwritten if it exists.
[256]306                         The default False is to return with warning
[411]307                         without writing the output. USE WITH CARE.
[1855]308
[1846]309        Example::
310
[116]311            scan.save('myscan.asap')
[1118]312            scan.save('myscan.sdfits', 'SDFITS')
[1846]313
[116]314        """
[411]315        from os import path
[1593]316        format = format or rcParams['scantable.save']
[256]317        suffix = '.'+format.lower()
[1118]318        if name is None or name == "":
[256]319            name = 'scantable'+suffix
[718]320            msg = "No filename given. Using default name %s..." % name
321            asaplog.push(msg)
[411]322        name = path.expandvars(name)
[256]323        if path.isfile(name) or path.isdir(name):
324            if not overwrite:
[718]325                msg = "File %s exists." % name
[1859]326                raise IOError(msg)
[451]327        format2 = format.upper()
328        if format2 == 'ASAP':
[116]329            self._save(name)
[2029]330        elif format2 == 'MS2':
331            msopt = {'ms': {'overwrite': overwrite } }
332            from asap._asap import mswriter
333            writer = mswriter( self )
334            writer.write( name, msopt )
[116]335        else:
[989]336            from asap._asap import stwriter as stw
[1118]337            writer = stw(format2)
338            writer.write(self, name)
[116]339        return
340
[102]341    def copy(self):
[1846]342        """Return a copy of this scantable.
343
344        *Note*:
345
[1348]346            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]347
348        Example::
349
[102]350            copiedscan = scan.copy()
[1846]351
[102]352        """
[876]353        sd = scantable(Scantable._copy(self))
[113]354        return sd
355
[1093]356    def drop_scan(self, scanid=None):
[1846]357        """\
[1093]358        Return a new scantable where the specified scan number(s) has(have)
359        been dropped.
[1846]360
[1093]361        Parameters:
[1846]362
[1093]363            scanid:    a (list of) scan number(s)
[1846]364
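        Example (a minimal sketch; the scan numbers are illustrative)::

            # drop scans 0 and 2 and keep everything else
            trimmed = scan.drop_scan([0, 2])
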
[1093]365        """
366        from asap import _is_sequence_or_number as _is_valid
367        from asap import _to_list
368        from asap import unique
369        if not _is_valid(scanid):
[1859]370            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
371        scanid = _to_list(scanid)
372        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
373        for sid in scanid: allscans.remove(sid)
374        if len(allscans) == 0:
375            raise ValueError("Can't remove all scans")
376        sel = selector(scans=allscans)
377        return self._select_copy(sel)
[1093]378
[1594]379    def _select_copy(self, selection):
380        orig = self.get_selection()
381        self.set_selection(orig+selection)
382        cp = self.copy()
383        self.set_selection(orig)
384        return cp
385
[102]386    def get_scan(self, scanid=None):
[1855]387        """\
[102]388        Return a specific scan (by scanno) or collection of scans (by
389        source name) in a new scantable.
[1846]390
391        *Note*:
392
[1348]393            See scantable.drop_scan() for the inverse operation.
[1846]394
[102]395        Parameters:
[1846]396
[513]397            scanid:    a (list of) scanno or a source name, unix-style
398                       patterns are accepted for source name matching, e.g.
399                       '*_R' gets all 'ref' scans
[1846]400
401        Example::
402
[513]403            # get all scans containing the source '323p459'
404            newscan = scan.get_scan('323p459')
405            # get all 'off' scans
406            refscans = scan.get_scan('*_R')
407            # get a subset of scans by scanno (as listed in scan.summary())
[1118]408            newscan = scan.get_scan([0, 2, 7, 10])
[1846]409
[102]410        """
411        if scanid is None:
[1859]412            raise RuntimeError( 'Please specify a scan no or name to '
413                                'retrieve from the scantable' )
[102]414        try:
[946]415            bsel = self.get_selection()
416            sel = selector()
[102]417            if type(scanid) is str:
[946]418                sel.set_name(scanid)
[1594]419                return self._select_copy(sel)
[102]420            elif type(scanid) is int:
[946]421                sel.set_scans([scanid])
[1594]422                return self._select_copy(sel)
[381]423            elif type(scanid) is list:
[946]424                sel.set_scans(scanid)
[1594]425                return self._select_copy(sel)
[381]426            else:
[718]427                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]428                raise TypeError(msg)
[102]429        except RuntimeError:
[1859]430            raise
[102]431
432    def __str__(self):
[2178]433        return Scantable._summary(self)
[102]434
[976]435    def summary(self, filename=None):
[1846]436        """\
[102]437        Print a summary of the contents of this scantable.
[1846]438
[102]439        Parameters:
[1846]440
[1931]441            filename:    the name of a file to write the output to
[102]442                         Default - no file output
[1846]443
[102]444        """
[2178]445        info = Scantable._summary(self)
[102]446        if filename is not None:
[256]447            if filename == "":
448                filename = 'scantable_summary.txt'
[415]449            from os.path import expandvars, isdir
[411]450            filename = expandvars(filename)
[415]451            if not isdir(filename):
[413]452                data = open(filename, 'w')
453                data.write(info)
454                data.close()
455            else:
[718]456                msg = "Illegal file name '%s'." % (filename)
[1859]457                raise IOError(msg)
458        return page(info)
[710]459
[1512]460    def get_spectrum(self, rowno):
[1471]461        """Return the spectrum for the current row in the scantable as a list.
[1846]462
[1471]463        Parameters:
[1846]464
[1573]465             rowno:   the row number to retrieve the spectrum from
[1846]466
[1471]467        """
468        return self._getspectrum(rowno)
[946]469
[1471]470    def get_mask(self, rowno):
471        """Return the mask for the current row in the scantable as a list.
[1846]472
[1471]473        Parameters:
[1846]474
[1573]475             rowno:   the row number to retrieve the mask from
[1846]476
[1471]477        """
478        return self._getmask(rowno)
479
480    def set_spectrum(self, spec, rowno):
[1938]481        """Set the spectrum for the current row in the scantable.
[1846]482
[1471]483        Parameters:
[1846]484
[1855]485             spec:   the new spectrum
[1846]486
[1855]487             rowno:  the row number to set the spectrum for
488
[1471]489        """
490        assert(len(spec) == self.nchan())
491        return self._setspectrum(spec, rowno)
492
[1600]493    def get_coordinate(self, rowno):
494        """Return the (spectral) coordinate for a given 'rowno'.
[1846]495
496        *Note*:
497
[1600]498            * This coordinate is only valid until a scantable method modifies
499              the frequency axis.
500            * This coordinate does contain the original frequency set-up
501              NOT the new frame. The conversions however are done using the user
502              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
503              use scantable.freq_align first. Without it there is no closure,
[1846]504              i.e.::
[1600]505
[1846]506                  c = myscan.get_coordinate(0)
507                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
508
[1600]509        Parameters:
[1846]510
[1600]511             rowno:    the row number for the spectral coordinate
512
513        """
514        return coordinate(Scantable.get_coordinate(self, rowno))
515
[946]516    def get_selection(self):
[1846]517        """\
[1005]518        Get the selection object currently set on this scantable.
[1846]519
520        Example::
521
[1005]522            sel = scan.get_selection()
523            sel.set_ifs(0)              # select IF 0
524            scan.set_selection(sel)     # apply modified selection
[1846]525
[946]526        """
527        return selector(self._getselection())
528
[1576]529    def set_selection(self, selection=None, **kw):
[1846]530        """\
[1005]531        Select a subset of the data. All following operations on this scantable
532        are only applied to this selection.
[1846]533
[1005]534        Parameters:
[1697]535
[1846]536            selection:    a selector object (default unset the selection), or
537                          any combination of "pols", "ifs", "beams", "scans",
538                          "cycles", "name", "query"
[1697]539
[1846]540        Examples::
[1697]541
[1005]542            sel = selector()         # create a selection object
[1118]543            sel.set_scans([0, 3])     # select SCANNO 0 and 3
[1005]544            scan.set_selection(sel)  # set the selection
545            scan.summary()           # will only print summary of scanno 0 an 3
546            scan.set_selection()     # unset the selection
[1697]547            # or the equivalent
548            scan.set_selection(scans=[0,3])
549            scan.summary()           # will only print summary of scanno 0 and 3
550            scan.set_selection()     # unset the selection
[1846]551
[946]552        """
[1576]553        if selection is None:
554            # reset
555            if len(kw) == 0:
556                selection = selector()
557            else:
558                # try keywords
559                for k in kw:
560                    if k not in selector.fields:
561                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
562                selection = selector(**kw)
[946]563        self._setselection(selection)
564
[1819]565    def get_row(self, row=0, insitu=None):
[1846]566        """\
[1819]567        Select a row in the scantable.
568        Return a scantable with a single row.
[1846]569
[1819]570        Parameters:
[1846]571
572            row:    row no of integration, default is 0.
573            insitu: if False a new scantable is returned. Otherwise, the
574                    scaling is done in-situ. The default is taken from .asaprc
575                    (False)
576
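        Example (a minimal sketch; the row number is illustrative)::

            # extract row 5 as a new single-row scantable
            onerow = scan.get_row(5, insitu=False)
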
[1819]577        """
578        if insitu is None: insitu = rcParams['insitu']
579        if not insitu:
580            workscan = self.copy()
581        else:
582            workscan = self
583        # Select a row
584        sel=selector()
[1992]585        sel.set_rows([row])
586        #sel.set_scans([workscan.getscan(row)])
587        #sel.set_cycles([workscan.getcycle(row)])
588        #sel.set_beams([workscan.getbeam(row)])
589        #sel.set_ifs([workscan.getif(row)])
590        #sel.set_polarisations([workscan.getpol(row)])
591        #sel.set_name(workscan._getsourcename(row))
[1819]592        workscan.set_selection(sel)
593        if not workscan.nrow() == 1:
594            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
595            raise RuntimeError(msg)
596        del sel
597        if insitu:
598            self._assign(workscan)
599        else:
600            return workscan
601
[1862]602    @asaplog_post_dec
[1907]603    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]604        """\
[135]605        Determine the specified statistic of the current beam/if/pol.
[102]606        Takes a 'mask' as an optional parameter to specify which
607        channels should be excluded.
[1846]608
[102]609        Parameters:
[1846]610
[1819]611            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
612                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]613
[135]614            mask:    an optional mask specifying where the statistic
[102]615                     should be determined.
[1855]616
[1819]617            form:    format string to print statistic values
[1846]618
[1907]619            row:     row number of spectrum to process.
620                     (default is None: for all rows)
[1846]621
[1907]622        Example:
[113]623            scan.set_unit('channel')
[1118]624            msk = scan.create_mask([100, 200], [500, 600])
[135]625            scan.stats(stat='mean', mask=msk)
[1846]626
[102]627        """
[1593]628        mask = mask or []
[876]629        if not self._check_ifs():
[1118]630            raise ValueError("Cannot apply mask as the IFs have different "
631                             "number of channels. Please use setselection() "
632                             "to select individual IFs")
[1819]633        rtnabc = False
634        if stat.lower().endswith('_abc'): rtnabc = True
635        getchan = False
636        if stat.lower().startswith('min') or stat.lower().startswith('max'):
637            chan = self._math._minmaxchan(self, mask, stat)
638            getchan = True
639            statvals = []
[1907]640        if not rtnabc:
641            if row == None:
642                statvals = self._math._stats(self, mask, stat)
643            else:
644                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]645
[1819]646        #def cb(i):
647        #    return statvals[i]
[256]648
[1819]649        #return self._row_callback(cb, stat)
[102]650
[1819]651        label=stat
652        #callback=cb
653        out = ""
654        #outvec = []
655        sep = '-'*50
[1907]656
657        if row == None:
658            rows = xrange(self.nrow())
659        elif isinstance(row, int):
660            rows = [ row ]
661
662        for i in rows:
[1819]663            refstr = ''
664            statunit= ''
665            if getchan:
666                qx, qy = self.chan2data(rowno=i, chan=chan[i])
667                if rtnabc:
668                    statvals.append(qx['value'])
669                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
670                    statunit= '['+qx['unit']+']'
671                else:
672                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
673
674            tm = self._gettime(i)
675            src = self._getsourcename(i)
676            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
677            out += 'Time[%s]:\n' % (tm)
[1907]678            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
679            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
680            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]681            #outvec.append(callback(i))
[1907]682            if len(rows) > 1:
683                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
684                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
685            else:
686                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
687                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]688            out +=  sep+"\n"
689
[1859]690        import os
691        if os.environ.has_key( 'USER' ):
692            usr = os.environ['USER']
693        else:
694            import commands
695            usr = commands.getoutput( 'whoami' )
696        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
697        f = open(tmpfile,'w')
698        print >> f, sep
699        print >> f, ' %s %s' % (label, statunit)
700        print >> f, sep
701        print >> f, out
702        f.close()
703        f = open(tmpfile,'r')
704        x = f.readlines()
705        f.close()
706        asaplog.push(''.join(x), False)
707
[1819]708        return statvals
709
710    def chan2data(self, rowno=0, chan=0):
[1846]711        """\
[1819]712        Returns channel/frequency/velocity and spectral value
713        at an arbitrary row and channel in the scantable.
[1846]714
[1819]715        Parameters:
[1846]716
[1819]717            rowno:   a row number in the scantable. Default is the
718                     first row, i.e. rowno=0
[1855]719
[1819]720            chan:    a channel in the scantable. Default is the first
721                     channel, i.e. chan=0
[1846]722
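        Example (a minimal sketch; row and channel are illustrative)::

            # abcissa and spectral value of channel 50 in row 0, e.g.
            # qx['value'], qx['unit'], qy['value'], qy['unit']
            qx, qy = scan.chan2data(rowno=0, chan=50)
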
[1819]723        """
724        if isinstance(rowno, int) and isinstance(chan, int):
725            qx = {'unit': self.get_unit(),
726                  'value': self._getabcissa(rowno)[chan]}
727            qy = {'unit': self.get_fluxunit(),
728                  'value': self._getspectrum(rowno)[chan]}
729            return qx, qy
730
[1118]731    def stddev(self, mask=None):
[1846]732        """\
[135]733        Determine the standard deviation of the current beam/if/pol.
734        Takes a 'mask' as an optional parameter to specify which
735        channels should be excluded.
[1846]736
[135]737        Parameters:
[1846]738
[135]739            mask:    an optional mask specifying where the standard
740                     deviation should be determined.
741
[1846]742        Example::
743
[135]744            scan.set_unit('channel')
[1118]745            msk = scan.create_mask([100, 200], [500, 600])
[135]746            scan.stddev(mask=msk)
[1846]747
[135]748        """
[1118]749        return self.stats(stat='stddev', mask=mask);
[135]750
[1003]751
[1259]752    def get_column_names(self):
[1846]753        """\
[1003]754        Return a list of column names, which can be used for selection.
755        """
[1259]756        return list(Scantable.get_column_names(self))
[1003]757
[1730]758    def get_tsys(self, row=-1):
[1846]759        """\
[113]760        Return the System temperatures.
[1846]761
762        Parameters:
763
764            row:    the rowno to get the information for. (default all rows)
765
[113]766        Returns:
[1846]767
[876]768            a list of Tsys values for the current selection
[1846]769
[113]770        """
[1730]771        if row > -1:
772            return self._get_column(self._gettsys, row)
[876]773        return self._row_callback(self._gettsys, "Tsys")
[256]774
[1730]775
776    def get_weather(self, row=-1):
[1846]777        """\
778        Return the weather information.
779
780        Parameters:
781
782            row:    the rowno to get the information for. (default all rows)
783
784        Returns:
785
786            a dict or list of dicts of values for the current selection
787
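        Example (a minimal sketch)::

            w = scan.get_weather(0)       # dict for row 0
            print w['temperature'], w['pressure']
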
788        """
789
[1730]790        values = self._get_column(self._get_weather, row)
791        if row > -1:
792            return {'temperature': values[0],
793                    'pressure': values[1], 'humidity' : values[2],
794                    'windspeed' : values[3], 'windaz' : values[4]
795                    }
796        else:
797            out = []
798            for r in values:
799
800                out.append({'temperature': r[0],
801                            'pressure': r[1], 'humidity' : r[2],
802                            'windspeed' : r[3], 'windaz' : r[4]
803                    })
804            return out
805
[876]806    def _row_callback(self, callback, label):
807        out = ""
[1118]808        outvec = []
[1590]809        sep = '-'*50
[876]810        for i in range(self.nrow()):
811            tm = self._gettime(i)
812            src = self._getsourcename(i)
[1590]813            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]814            out += 'Time[%s]:\n' % (tm)
[1590]815            if self.nbeam(-1) > 1:
816                out +=  ' Beam[%d] ' % (self.getbeam(i))
817            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
818            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]819            outvec.append(callback(i))
820            out += '= %3.3f\n' % (outvec[i])
[1590]821            out +=  sep+'\n'
[1859]822
823        asaplog.push(sep)
824        asaplog.push(" %s" % (label))
825        asaplog.push(sep)
826        asaplog.push(out)
[1861]827        asaplog.post()
[1175]828        return outvec
[256]829
[1947]830    def _get_column(self, callback, row=-1, *args):
[1070]831        """
832        """
833        if row == -1:
[1947]834            return [callback(i, *args) for i in range(self.nrow())]
[1070]835        else:
[1819]836            if  0 <= row < self.nrow():
[1947]837                return callback(row, *args)
[256]838
[1070]839
[1948]840    def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]841        """\
[113]842        Get a list of time stamps for the observations.
[1938]843        Return a datetime object or a string (default) for each
844        integration time stamp in the scantable.
[1846]845
[113]846        Parameters:
[1846]847
[1348]848            row:          row no of integration. Default -1 return all rows
[1855]849
[1348]850            asdatetime:   return values as datetime objects rather than strings
[1846]851
[1948]852            prec:         number of digits shown. Default -1 to automatic calculation.
853                          Note this number equals the digits of MVTime,
854                          i.e., 0<prec<3: dates with hh:: only,
855                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
[1947]856                          and >6: with hh:mm:ss.tt... ((prec-6) t's added)
857
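        Example (a minimal sketch)::

            # time stamps of all rows as strings with hh:mm:ss precision
            stamps = scan.get_time(prec=6)
            # time stamp of row 0 as a datetime object
            t0 = scan.get_time(0, asdatetime=True)
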
[113]858        """
[1175]859        from datetime import datetime
[1948]860        if prec < 0:
861            # automagically set necessary precision +1
[1950]862            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]863            prec = max(6, int(prec))
864        else:
865            prec = max(0, prec)
866        if asdatetime:
867            #precision can be 1 millisecond at max
868            prec = min(12, prec)
869
[1947]870        times = self._get_column(self._gettime, row, prec)
[1348]871        if not asdatetime:
[1392]872            return times
[1947]873        format = "%Y/%m/%d/%H:%M:%S.%f"
874        if prec < 7:
875            nsub = 1 + (((6-prec)/2) % 3)
876            substr = [".%f","%S","%M"]
877            for i in range(nsub):
878                format = format.replace(substr[i],"")
[1175]879        if isinstance(times, list):
[1947]880            return [datetime.strptime(i, format) for i in times]
[1175]881        else:
[1947]882            return datetime.strptime(times, format)
[102]883
[1348]884
885    def get_inttime(self, row=-1):
[1846]886        """\
[1348]887        Get a list of integration times for the observations.
888        Return a time in seconds for each integration in the scantable.
[1846]889
[1348]890        Parameters:
[1846]891
[1348]892            row:    row no of integration. Default -1 return all rows.
[1846]893
[1348]894        """
[1573]895        return self._get_column(self._getinttime, row)
[1348]896
[1573]897
[714]898    def get_sourcename(self, row=-1):
[1846]899        """\
[794]900        Get a list of source names for the observations.
[714]901        Return a string for each integration in the scantable.
902        Parameters:
[1846]903
[1348]904            row:    row no of integration. Default -1 return all rows.
[1846]905
[714]906        """
[1070]907        return self._get_column(self._getsourcename, row)
[714]908
[794]909    def get_elevation(self, row=-1):
[1846]910        """\
[794]911        Get a list of elevations for the observations.
912        Return a float for each integration in the scantable.
[1846]913
[794]914        Parameters:
[1846]915
[1348]916            row:    row no of integration. Default -1 return all rows.
[1846]917
[794]918        """
[1070]919        return self._get_column(self._getelevation, row)
[794]920
921    def get_azimuth(self, row=-1):
[1846]922        """\
[794]923        Get a list of azimuths for the observations.
924        Return a float for each integration in the scantable.
[1846]925
[794]926        Parameters:
[1348]927            row:    row no of integration. Default -1 return all rows.
[1846]928
[794]929        """
[1070]930        return self._get_column(self._getazimuth, row)
[794]931
932    def get_parangle(self, row=-1):
[1846]933        """\
[794]934        Get a list of parallactic angles for the observations.
935        Return a float for each integration in the scantable.
[1846]936
[794]937        Parameters:
[1846]938
[1348]939            row:    row no of integration. Default -1 return all rows.
[1846]940
[794]941        """
[1070]942        return self._get_column(self._getparangle, row)
[794]943
[1070]944    def get_direction(self, row=-1):
945        """
946        Get a list of Positions on the sky (direction) for the observations.
[1594]947        Return a string for each integration in the scantable.
[1855]948
[1070]949        Parameters:
[1855]950
[1070]951            row:    row no of integration. Default -1 return all rows
[1855]952
[1070]953        """
954        return self._get_column(self._getdirection, row)
955
[1391]956    def get_directionval(self, row=-1):
[1846]957        """\
[1391]958        Get a list of Positions on the sky (direction) for the observations.
959        Return a float for each integration in the scantable.
[1846]960
[1391]961        Parameters:
[1846]962
[1391]963            row:    row no of integration. Default -1 return all rows
[1846]964
[1391]965        """
966        return self._get_column(self._getdirectionvec, row)
967
[1862]968    @asaplog_post_dec
[102]969    def set_unit(self, unit='channel'):
[1846]970        """\
[102]971        Set the unit for all following operations on this scantable
[1846]972
[102]973        Parameters:
[1846]974
975            unit:    optional unit, default is 'channel'. Use one of '*Hz',
976                     'km/s', 'channel' or equivalent ''
977
[102]978        """
[484]979        varlist = vars()
[1118]980        if unit in ['', 'pixel', 'channel']:
[113]981            unit = ''
982        inf = list(self._getcoordinfo())
983        inf[0] = unit
984        self._setcoordinfo(inf)
[1118]985        self._add_history("set_unit", varlist)
[113]986
[1862]987    @asaplog_post_dec
[484]988    def set_instrument(self, instr):
[1846]989        """\
[1348]990        Set the instrument for subsequent processing.
[1846]991
[358]992        Parameters:
[1846]993
[710]994            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]995                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]996
[358]997        """
998        self._setInstrument(instr)
[1118]999        self._add_history("set_instrument", vars())
[358]1000
[1862]1001    @asaplog_post_dec
[1190]1002    def set_feedtype(self, feedtype):
[1846]1003        """\
[1190]1004        Overwrite the feed type, which might not be set correctly.
[1846]1005
[1190]1006        Parameters:
[1846]1007
[1190]1008            feedtype:     'linear' or 'circular'
[1846]1009
[1190]1010        """
1011        self._setfeedtype(feedtype)
1012        self._add_history("set_feedtype", vars())
1013
[1862]1014    @asaplog_post_dec
[276]1015    def set_doppler(self, doppler='RADIO'):
[1846]1016        """\
[276]1017        Set the doppler for all following operations on this scantable.
[1846]1018
[276]1019        Parameters:
[1846]1020
[276]1021            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1022
[276]1023        """
[484]1024        varlist = vars()
[276]1025        inf = list(self._getcoordinfo())
1026        inf[2] = doppler
1027        self._setcoordinfo(inf)
[1118]1028        self._add_history("set_doppler", vars())
[710]1029
[1862]1030    @asaplog_post_dec
[226]1031    def set_freqframe(self, frame=None):
[1846]1032        """\
[113]1033        Set the frame type of the Spectral Axis.
[1846]1034
[113]1035        Parameters:
[1846]1036
[591]1037            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]1038                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1039                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1040
1041        Example::
1042
[113]1043            scan.set_freqframe('BARY')
[1846]1044
[113]1045        """
[1593]1046        frame = frame or rcParams['scantable.freqframe']
[484]1047        varlist = vars()
[1819]1048        # "REST" is not implemented in casacore
1049        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1050        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1052                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1053
[989]1054        if frame in valid:
[113]1055            inf = list(self._getcoordinfo())
1056            inf[1] = frame
1057            self._setcoordinfo(inf)
[1118]1058            self._add_history("set_freqframe", varlist)
[102]1059        else:
[1118]1060            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
[1859]1061            raise TypeError(msg)
[710]1062
[1862]1063    @asaplog_post_dec
[989]1064    def set_dirframe(self, frame=""):
[1846]1065        """\
[989]1066        Set the frame type of the Direction on the sky.
[1846]1067
[989]1068        Parameters:
[1846]1069
[989]1070            frame:   an optional frame type, default ''. Valid frames are:
1071                     'J2000', 'B1950', 'GALACTIC'
[1846]1072
1073        Example:
1074
[989]1075            scan.set_dirframe('GALACTIC')
[1846]1076
[989]1077        """
1078        varlist = vars()
[1859]1079        Scantable.set_dirframe(self, frame)
[1118]1080        self._add_history("set_dirframe", varlist)
[989]1081
[113]1082    def get_unit(self):
[1846]1083        """\
[113]1084        Get the default unit set in this scantable
[1846]1085
[113]1086        Returns:
[1846]1087
[113]1088            A unit string
[1846]1089
[113]1090        """
1091        inf = self._getcoordinfo()
1092        unit = inf[0]
1093        if unit == '': unit = 'channel'
1094        return unit
[102]1095
[1862]1096    @asaplog_post_dec
[158]1097    def get_abcissa(self, rowno=0):
[1846]1098        """\
[158]1099        Get the abcissa in the current coordinate setup for the currently
[113]1100        selected Beam/IF/Pol
[1846]1101
[113]1102        Parameters:
[1846]1103
[226]1104            rowno:    an optional row number in the scantable. Default is the
1105                      first row, i.e. rowno=0
[1846]1106
[113]1107        Returns:
[1846]1108
[1348]1109            The abcissa values and the format string (as a dictionary)
[1846]1110
[113]1111        """
[256]1112        abc = self._getabcissa(rowno)
[710]1113        lbl = self._getabcissalabel(rowno)
[158]1114        return abc, lbl
[113]1115
[1862]1116    @asaplog_post_dec
[1994]1117    def flag(self, row=-1, mask=None, unflag=False):
[1846]1118        """\
[1001]1119        Flag the selected data using an optional channel mask.
[1846]1120
[1001]1121        Parameters:
[1846]1122
[1994]1123            row:    an optional row number in the scantable.
1124                      Default -1 flags all rows
1125                     
[1001]1126            mask:   an optional channel mask, created with create_mask. Default
1127                    (no mask) is all channels.
[1855]1128
[1819]1129            unflag:    if True, unflag the data
[1846]1130
[1001]1131        """
1132        varlist = vars()
[1593]1133        mask = mask or []
[1994]1134        self._flag(row, mask, unflag)
[1001]1135        self._add_history("flag", varlist)
1136
[1862]1137    @asaplog_post_dec
[1819]1138    def flag_row(self, rows=[], unflag=False):
[1846]1139        """\
[1819]1140        Flag the selected data in row-based manner.
[1846]1141
[1819]1142        Parameters:
[1846]1143
[1843]1144            rows:   list of row numbers to be flagged. Default is no row
1145                    (must be explicitly specified to execute row-based flagging).
[1855]1146
[1819]1147            unflag: if True, unflag the data.
[1846]1148
[1819]1149        """
1150        varlist = vars()
[1859]1151        self._flag_row(rows, unflag)
[1819]1152        self._add_history("flag_row", varlist)
1153
[1862]1154    @asaplog_post_dec
[1819]1155    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1156        """\
[1819]1157        Flag the selected data outside a specified range (channel-based).
[1846]1158
[1819]1159        Parameters:
[1846]1160
[1819]1161            uthres:      upper threshold.
[1855]1162
[1819]1163            dthres:      lower threshold
[1846]1164
[1819]1165            clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1166                         False for flagging data inside the range.
[1855]1167
[1846]1168            unflag:      if True, unflag the data.
1169
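        Example (a minimal sketch; the thresholds are illustrative)::

            # flag everything outside the range [-0.5:5.0]
            scan.clip(uthres=5.0, dthres=-0.5)
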
[1819]1170        """
1171        varlist = vars()
[1859]1172        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1173        self._add_history("clip", varlist)
1174
[1862]1175    @asaplog_post_dec
[1584]1176    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1177        """\
[1192]1178        Flag the data in 'lag' space by providing a frequency to remove.
[2177]1179        Flagged data in the scantable get interpolated over the region.
[1192]1180        No taper is applied.
[1846]1181
[1192]1182        Parameters:
[1846]1183
[1579]1184            start:    the start frequency (really a period within the
1185                      bandwidth)  or period to remove
[1855]1186
[1579]1187            end:      the end frequency or period to remove
[1855]1188
[1584]1189            unit:     the frequency unit (default "MHz") or "" for
[1579]1190                      explicit lag channels
[1846]1191
1192        *Notes*:
1193
[1579]1194            It is recommended to flag edges of the band or strong
[1348]1195            signals beforehand.
[1846]1196
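        Example (a minimal sketch; the frequencies are illustrative)::

            # interpolate over a baseline ripple between 1.2 and 1.3 MHz;
            # depending on the 'insitu' setting this modifies scan in place
            # or returns a new scantable
            scan.lag_flag(1.2, 1.3, unit="MHz")
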
[1192]1197        """
1198        if insitu is None: insitu = rcParams['insitu']
1199        self._math._setinsitu(insitu)
1200        varlist = vars()
[1579]1201        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1202        if not (unit == "" or base.has_key(unit)):
[1192]1203            raise ValueError("%s is not a valid unit." % unit)
[1859]1204        if unit == "":
1205            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1206        else:
1207            s = scantable(self._math._lag_flag(self, start*base[unit],
1208                                               end*base[unit], "frequency"))
[1192]1209        s._add_history("lag_flag", varlist)
1210        if insitu:
1211            self._assign(s)
1212        else:
1213            return s
[1001]1214
[1862]1215    @asaplog_post_dec
[2186]1216    def fft(self, rowno=[], mask=[], getrealimag=False):
[2177]1217        """\
1218        Apply FFT to the spectra.
1219        Flagged data in the scantable get interpolated over the region.
1220
1221        Parameters:
[2186]1222
1223            rowno:          The row number(s) to be processed. int, list
1224                            and tuple are accepted. By default ([]), FFT
1225                            is applied to the whole data.
1226
1227            mask:           Auxiliary channel mask(s). Given as a boolean
1228                            list, it is applied to all specified rows.
1229                            A list of boolean lists can also be used to
1230                            apply different masks. In the latter case, the
1231                            length of 'mask' must be the same as that of
1232                            'rowno'. The default is [].
[2177]1233       
1234            getrealimag:    If True, returns the real and imaginary part
1235                            values of the complex results.
1236                            If False (the default), returns the amplitude
1237                            (absolute value) normalised with Ndata/2 and
1238                            phase (argument, in unit of radian).
1239
1240        Returns:
1241
[2186]1242            A list of dictionaries containing the results for each spectrum.
1243            Each dictionary contains two values, the real and the imaginary
1244            parts when getrealimag = True, or the amplitude (absolute value)
1245            and the phase (argument) when getrealimag = False. The keys for
1246            these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1247            respectively.
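
        Example (a minimal sketch; the row numbers are illustrative)::

            # amplitude and phase for rows 0 and 1
            res = scan.fft(rowno=[0, 1])
            ampl0, phase0 = res[0]['ampl'], res[0]['phase']
            # real and imaginary parts for all rows
            res = scan.fft(getrealimag=True)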
1248        """
1249        if isinstance(rowno, int):
1250            rowno = [rowno]
1251        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1252            raise TypeError("The row number(s) must be int, list or tuple.")
1253
1254        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1255
1256        if not (isinstance(mask, list) or isinstance(mask, tuple)):
1257            raise TypeError("The mask must be a boolean list or a list of boolean list.")
1258        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1259        if isinstance(mask[0], bool): mask = [mask]
1260        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1261            raise TypeError("The mask must be a boolean list or a list of boolean list.")
1262
1263        usecommonmask = (len(mask) == 1)
1264        if not usecommonmask:
1265            if len(mask) != len(rowno):
1266                raise ValueError("When specifying masks for each spectrum, the numbers of them must be identical.")
1267        for amask in mask:
1268            if len(amask) != self.nchan():
1269                raise ValueError("The spectra and the mask have different length.")
[2177]1270       
[2186]1271        res = []
1272
1273        imask = 0
1274        for whichrow in rowno:
1275            fspec = self._fft(whichrow, mask[imask], getrealimag)
1276            nspec = len(fspec)
[2177]1277           
[2186]1278            i=0
1279            v1=[]
1280            v2=[]
1281            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
[2177]1282           
[2186]1283            while (i < nspec):
1284                v1.append(fspec[i])
1285                v2.append(fspec[i+1])
1286                i+=2
1287           
[2177]1288            if getrealimag:
[2186]1289                reselem["real"]  += v1
1290                reselem["imag"]  += v2
[2177]1291            else:
[2186]1292                reselem["ampl"]  += v1
1293                reselem["phase"] += v2
[2177]1294           
[2186]1295            res.append(reselem)
1296           
1297            if not usecommonmask: imask += 1
1298       
[2177]1299        return res
1300
1301    @asaplog_post_dec
[113]1302    def create_mask(self, *args, **kwargs):
[1846]1303        """\
[1118]1304        Compute and return a mask based on [min, max] windows.
[189]1305        The specified windows are to be INCLUDED, when the mask is
[113]1306        applied.
[1846]1307
[102]1308        Parameters:
[1846]1309
[1118]1310            [min, max], [min2, max2], ...
[1024]1311                Pairs of start/end points (inclusive) specifying the regions
[102]1312                to be masked
[1855]1313
[189]1314            invert:     optional argument. If specified as True,
1315                        return an inverted mask, i.e. the regions
1316                        specified are EXCLUDED
[1855]1317
[513]1318            row:        create the mask using the specified row for
1319                        unit conversions, default is row=0
1320                        only necessary if frequency varies over rows.
[1846]1321
1322        Examples::
1323
[113]1324            scan.set_unit('channel')
[1846]1325            # a)
[1118]1326            msk = scan.create_mask([400, 500], [800, 900])
[189]1327            # masks everything outside 400 and 500
[113]1328            # and 800 and 900 in the unit 'channel'
1329
[1846]1330            # b)
[1118]1331            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1332            # masks the regions between 400 and 500
[113]1333            # and 800 and 900 in the unit 'channel'
[1846]1334
1335            # c)
1336            #mask only channel 400
[1554]1337            msk =  scan.create_mask([400])
[1846]1338
[102]1339        """
[1554]1340        row = kwargs.get("row", 0)
[513]1341        data = self._getabcissa(row)
[113]1342        u = self._getcoordinfo()[0]
[1859]1343        if u == "":
1344            u = "channel"
1345        msg = "The current mask window unit is %s" % u
1346        i = self._check_ifs()
1347        if not i:
1348            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1349        asaplog.push(msg)
[102]1350        n = self.nchan()
[1295]1351        msk = _n_bools(n, False)
[710]1352        # test if args is a 'list' or a 'normal *args - UGLY!!!
1353
[1118]1354        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1355             and args or args[0]
[710]1356        for window in ws:
[1554]1357            if len(window) == 1:
1358                window = [window[0], window[0]]
1359            if len(window) == 0 or  len(window) > 2:
1360                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1361            if window[0] > window[1]:
1362                tmp = window[0]
1363                window[0] = window[1]
1364                window[1] = tmp
[102]1365            for i in range(n):
[1024]1366                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1367                    msk[i] = True
[113]1368        if kwargs.has_key('invert'):
1369            if kwargs.get('invert'):
[1295]1370                msk = mask_not(msk)
[102]1371        return msk
[710]1372
[1931]1373    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1374        """\
[1819]1375        Compute and return a list of mask windows, [min, max].
[1846]1376
[1819]1377        Parameters:
[1846]1378
[1819]1379            mask:       channel mask, created with create_mask.
[1855]1380
[1819]1381            row:        calculate the masklist using the specified row
1382                        for unit conversions, default is row=0
1383                        only necessary if frequency varies over rows.
[1846]1384
[1819]1385        Returns:
[1846]1386
[1819]1387            [min, max], [min2, max2], ...
1388                Pairs of start/end points (inclusive) specifying
1389                the masked regions
[1846]1390
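        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the window values are purely illustrative)::

            # recover the [min, max] windows from a channel mask
            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)
            # e.g. [[400.0, 500.0], [800.0, 900.0]] for unit 'channel'
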
[1819]1391        """
1392        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1393            raise TypeError("The mask should be list or tuple.")
1394        if len(mask) < 2:
1395            raise TypeError("The mask elements should be > 1")
1396        if self.nchan() != len(mask):
1397            msg = "Number of channels in scantable != number of mask elements"
1398            raise TypeError(msg)
1399        data = self._getabcissa(row)
1400        u = self._getcoordinfo()[0]
[1859]1401        if u == "":
1402            u = "channel"
1403        msg = "The current mask window unit is %s" % u
1404        i = self._check_ifs()
1405        if not i:
1406            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1407        if not silent:
1408            asaplog.push(msg)
[1819]1409        masklist=[]
1410        ist, ien = None, None
1411        ist, ien=self.get_mask_indices(mask)
1412        if ist is not None and ien is not None:
1413            for i in xrange(len(ist)):
1414                range=[data[ist[i]],data[ien[i]]]
1415                range.sort()
1416                masklist.append([range[0],range[1]])
1417        return masklist
1418
1419    def get_mask_indices(self, mask=None):
[1846]1420        """\
[1819]1421        Compute and return lists of mask start indices and mask end indices.
[1855]1422
1423        Parameters:
1424
[1819]1425            mask:       channel mask, created with create_mask.
[1846]1426
[1819]1427        Returns:
[1846]1428
[1819]1429            List of mask start indices and that of mask end indices,
1430            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1431
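        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the values are purely illustrative)::

            msk = scan.create_mask([400, 500], [800, 900])
            istart, iend = scan.get_mask_indices(msk)
            # e.g. istart == [400, 800], iend == [500, 900]
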
[1819]1432        """
1433        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1434            raise TypeError("The mask should be list or tuple.")
1435        if len(mask) < 2:
1436            raise TypeError("The mask elements should be > 1")
1437        istart=[]
1438        iend=[]
1439        if mask[0]: istart.append(0)
1440        for i in range(len(mask)-1):
1441            if not mask[i] and mask[i+1]:
1442                istart.append(i+1)
1443            elif mask[i] and not mask[i+1]:
1444                iend.append(i)
1445        if mask[len(mask)-1]: iend.append(len(mask)-1)
1446        if len(istart) != len(iend):
1447            raise RuntimeError("Numbers of mask start != mask end.")
1448        for i in range(len(istart)):
1449            if istart[i] > iend[i]:
1450                raise RuntimeError("Mask start index > mask end index")
1452        return istart,iend
1453
[2013]1454    @asaplog_post_dec
1455    def parse_maskexpr(self,maskstring):
1456        """
1457        Parse CASA type mask selection syntax (IF dependent).
1458
1459        Parameters:
1460            maskstring : A string mask selection expression.
1461                         Comma-separated selections specify different
1462                         IF/channel combinations. An IF selection and its
1463                         channel selection are separated by a colon, ':'.
1464                     examples:
[2015]1465                         ''          = all IFs (all channels)
[2013]1466                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
1467                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1468                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1469                                       all channels in IF8
1470        Returns:
1471        A dictionary of selected (valid) IF and masklist pairs,
1472        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
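        Example:
            # a minimal usage sketch; 'scan' is assumed to be a scantable
            # containing IFs 0 and 2, with the unit set to 'channel'
            seldic = scan.parse_maskexpr('0:50~250;350~462,2:100~400')
            # -> {'0': [[50.0, 250.0], [350.0, 462.0]], '2': [[100.0, 400.0]]}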
1473        """
1474        if not isinstance(maskstring,str):
1475            asaplog.post()
1476            asaplog.push("Invalid mask expression")
1477            asaplog.post("ERROR")
1478       
1479        valid_ifs = self.getifnos()
1480        frequnit = self.get_unit()
1481        seldict = {}
[2015]1482        if maskstring == "":
1483            maskstring = str(valid_ifs)[1:-1]
[2013]1484        ## split each selection
1485        sellist = maskstring.split(',')
1486        for currselstr in sellist:
1487            selset = currselstr.split(':')
1488            # spw and mask string (may include ~, < or >)
1489            spwmasklist = self._parse_selection(selset[0],typestr='integer',
1490                                               offset=1,minval=min(valid_ifs),
1491                                               maxval=max(valid_ifs))
1492            for spwlist in spwmasklist:
1493                selspws = []
1494                for ispw in range(spwlist[0],spwlist[1]+1):
1495                    # Put into the list only if ispw exists
1496                    if valid_ifs.count(ispw):
1497                        selspws.append(ispw)
1498            del spwmasklist, spwlist
1499
1500            # parse frequency mask list
1501            if len(selset) > 1:
1502                freqmasklist = self._parse_selection(selset[1],typestr='float',
1503                                                    offset=0.)
1504            else:
1505                # want to select the whole spectrum
1506                freqmasklist = [None]
1507
1508            ## define a dictionary of spw - masklist combination
1509            for ispw in selspws:
1510                #print "working on", ispw
1511                spwstr = str(ispw)
1512                if len(selspws) == 0:
1513                    # empty spw
1514                    continue
1515                else:
1516                    ## want to get min and max of the spw and
1517                    ## offset to set for '<' and '>'
1518                    if frequnit == 'channel':
1519                        minfreq = 0
1520                        maxfreq = self.nchan(ifno=ispw)
1521                        offset = 0.5
1522                    else:
1523                        ## This is the ugly part; needs improvement
1524                        for ifrow in xrange(self.nrow()):
1525                            if self.getif(ifrow) == ispw:
1526                                #print "IF",ispw,"found in row =",ifrow
1527                                break
1528                        freqcoord = self.get_coordinate(ifrow)
1529                        freqs = self._getabcissa(ifrow)
1530                        minfreq = min(freqs)
1531                        maxfreq = max(freqs)
1532                        if len(freqs) == 1:
1533                            offset = 0.5
1534                        elif frequnit.find('Hz') > 0:
1535                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1536                                      -freqcoord.to_frequency(0,unit=frequnit))*0.5
1537                        elif frequnit.find('m/s') > 0:
1538                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1539                                      -freqcoord.to_velocity(0,unit=frequnit))*0.5
1540                        else:
1541                            asaplog.post()
1542                            asaplog.push("Invalid frequency unit")
1543                            asaplog.post("ERROR")
1544                        del freqs, freqcoord, ifrow
1545                    for freq in freqmasklist:
1546                        selmask = freq or [minfreq, maxfreq]
1547                        if selmask[0] == None:
1548                            ## selection was "<freq[1]".
1549                            if selmask[1] < minfreq:
1550                                ## avoid adding region selection
1551                                selmask = None
1552                            else:
1553                                selmask = [minfreq,selmask[1]-offset]
1554                        elif selmask[1] == None:
1555                            ## selection was ">freq[0]"
1556                            if selmask[0] > maxfreq:
1557                                ## avoid adding region selection
1558                                selmask = None
1559                            else:
1560                                selmask = [selmask[0]+offset,maxfreq]
1561                        if selmask:
1562                            if not seldict.has_key(spwstr):
1563                                # new spw selection
1564                                seldict[spwstr] = []
1565                            seldict[spwstr] += [selmask]
1566                    del minfreq,maxfreq,offset,freq,selmask
1567                del spwstr
1568            del freqmasklist
1569        del valid_ifs
1570        if len(seldict) == 0:
1571            asaplog.post()
1572            asaplog.push("No valid selection in the mask expression: "+maskstring)
1573            asaplog.post("WARN")
1574            return None
1575        msg = "Selected masklist:\n"
1576        for sif, lmask in seldict.iteritems():
1577            msg += "   IF"+sif+" - "+str(lmask)+"\n"
1578        asaplog.push(msg)
1579        return seldict
1580
1581    def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1582        """
1583        Parameters:
1584            selstr :    The Selection string, e.g., '<3;5~7;100~103;9'
1585            typestr :   The type of the values in returned list
1586                        ('integer' or 'float')
1587            offset :    The offset value to subtract from or add to
1588                        the boundary value if the selection string
1589                        includes '<' or '>'
1590            minval, maxval :  The minimum/maximum values to set if the
1591                              selection string includes '<' or '>'.
1592                              The list element is filled with None by default.
1593        Returns:
1594            A list of min/max pair of selections.
1595        Example:
1596            _parse_selection('<3;5~7;9',typestr='int',offset=1,minval=0)
1597            returns [[0,2],[5,7],[9,9]]
1598        """
1599        selgroups = selstr.split(';')
1600        sellists = []
1601        if typestr.lower().startswith('int'):
1602            formatfunc = int
1603        else:
1604            formatfunc = float
1605       
1606        for currsel in  selgroups:
1607            if currsel.find('~') > 0:
1608                minsel = formatfunc(currsel.split('~')[0].strip())
1609                maxsel = formatfunc(currsel.split('~')[1].strip())
1610            elif currsel.strip().startswith('<'):
1611                minsel = minval
1612                maxsel = formatfunc(currsel.split('<')[1].strip()) \
1613                         - formatfunc(offset)
1614            elif currsel.strip().startswith('>'):
1615                minsel = formatfunc(currsel.split('>')[1].strip()) \
1616                         + formatfunc(offset)
1617                maxsel = maxval
1618            else:
1619                minsel = formatfunc(currsel)
1620                maxsel = formatfunc(currsel)
1621            sellists.append([minsel,maxsel])
1622        return sellists
1623
[1819]1624#    def get_restfreqs(self):
1625#        """
1626#        Get the restfrequency(s) stored in this scantable.
1627#        The return value(s) are always of unit 'Hz'
1628#        Parameters:
1629#            none
1630#        Returns:
1631#            a list of doubles
1632#        """
1633#        return list(self._getrestfreqs())
1634
1635    def get_restfreqs(self, ids=None):
[1846]1636        """\
[256]1637        Get the restfrequency(s) stored in this scantable.
1638        The return value(s) are always of unit 'Hz'
[1846]1639
[256]1640        Parameters:
[1846]1641
[1819]1642            ids: (optional) a list of MOLECULE_ID for which the restfrequency(s)
1643                 are to be retrieved
[1846]1644
[256]1645        Returns:
[1846]1646
[1819]1647            dictionary containing ids and a list of doubles for each id
[1846]1648
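        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # all rest frequencies as a dictionary keyed by MOLECULE_ID
            rfdict = scan.get_restfreqs()
            # rest frequencies for MOLECULE_ID 0 only
            rf0 = scan.get_restfreqs(ids=0)
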
[256]1649        """
[1819]1650        if ids is None:
1651            rfreqs={}
1652            idlist = self.getmolnos()
1653            for i in idlist:
1654                rfreqs[i]=list(self._getrestfreqs(i))
1655            return rfreqs
1656        else:
1657            if type(ids)==list or type(ids)==tuple:
1658                rfreqs={}
1659                for i in ids:
1660                    rfreqs[i]=list(self._getrestfreqs(i))
1661                return rfreqs
1662            else:
1663                return list(self._getrestfreqs(ids))
1664            #return list(self._getrestfreqs(ids))
[102]1665
[931]1666    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1667        """\
[931]1668        Set or replace the restfrequency specified.
[1938]1669        If the 'freqs' argument holds a scalar,
[931]1670        then that rest frequency will be applied to all the selected
1671        data.  If the 'freqs' argument holds
1672        a vector, then it MUST be of equal or smaller length than
1673        the number of IFs (and the available restfrequencies will be
1674        replaced by this vector).  In this case, *all* data have
1675        the restfrequency set per IF according
1676        to the corresponding value you give in the 'freqs' vector.
[1118]1677        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1678        IF 1 gets restfreq 2e9.
[1846]1679
[1395]1680        You can also specify the frequencies via a linecatalog.
[1153]1681
[931]1682        Parameters:
[1846]1683
[931]1684            freqs:   list of rest frequency values or string identifiers
[1855]1685
[931]1686            unit:    unit for rest frequency (default 'Hz')
[402]1687
[1846]1688
1689        Example::
1690
[1819]1691            # set the given restfrequency for the all currently selected IFs
[931]1692            scan.set_restfreqs(freqs=1.4e9)
[1845]1693            # set restfrequencies for the n IFs  (n > 1) in the order of the
1694            # list, i.e
1695            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1696            # len(list_of_restfreqs) == nIF
1697            # for nIF == 1 the following will set multiple restfrequency for
1698            # that IF
[1819]1699            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1700            # set multiple restfrequencies per IF. as a list of lists where
1701            # the outer list has nIF elements, the inner s arbitrary
1702            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1703
[1846]1704       *Note*:
[1845]1705
[931]1706            To do more sophisticated restfrequency setting, e.g. on a
1707            source and IF basis, use scantable.set_selection() before using
[1846]1708            this function::
[931]1709
[1846]1710                # provided your scantable is called scan
1711                selection = selector()
1712                selection.set_name("ORION*")
1713                selection.set_ifs([1])
1714                scan.set_selection(selection)
1715                scan.set_restfreqs(freqs=86.6e9)
1716
[931]1717        """
1718        varlist = vars()
[1157]1719        from asap import linecatalog
1720        # simple  value
[1118]1721        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1722            self._setrestfreqs([freqs], [""], unit)
[1157]1723        # list of values
[1118]1724        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1725            # list values are scalars
[1118]1726            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1727                if len(freqs) == 1:
1728                    self._setrestfreqs(freqs, [""], unit)
1729                else:
1730                    # allow the 'old' mode of setting multiple IFs
1731                    sel = selector()
1732                    savesel = self._getselection()
1733                    iflist = self.getifnos()
1734                    if len(freqs)>len(iflist):
1735                        raise ValueError("number of elements in list "
1736                                         "exceeds the current IF selections")
1738                    for i, fval in enumerate(freqs):
1739                        sel.set_ifs(iflist[i])
1740                        self._setselection(sel)
1741                        self._setrestfreqs([fval], [""], unit)
1742                    self._setselection(savesel)
1743
1744            # list values are dicts, {'value': ..., 'name': ...}
[1157]1745            elif isinstance(freqs[-1], dict):
[1845]1746                values = []
1747                names = []
1748                for d in freqs:
1749                    values.append(d["value"])
1750                    names.append(d["name"])
1751                self._setrestfreqs(values, names, unit)
[1819]1752            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1753                sel = selector()
1754                savesel = self._getselection()
[1322]1755                iflist = self.getifnos()
[1819]1756                if len(freqs)>len(iflist):
[1845]1757                    raise ValueError("number of elements in list of list exceeds"
1758                                     " the current IF selections")
1759                for i, fval in enumerate(freqs):
[1322]1760                    sel.set_ifs(iflist[i])
[1259]1761                    self._setselection(sel)
[1845]1762                    self._setrestfreqs(fval, [""], unit)
[1157]1763                self._setselection(savesel)
1764        # freqs are to be taken from a linecatalog
[1153]1765        elif isinstance(freqs, linecatalog):
1766            sel = selector()
1767            savesel = self._getselection()
            iflist = self.getifnos()
1768            for i in xrange(freqs.nrow()):
[1322]1769                sel.set_ifs(iflist[i])
[1153]1770                self._setselection(sel)
[1845]1771                self._setrestfreqs([freqs.get_frequency(i)],
1772                                   [freqs.get_name(i)], "MHz")
[1153]1773                # ensure that we are not iterating past nIF
1774                if i == self.nif()-1: break
1775            self._setselection(savesel)
[931]1776        else:
1777            return
1778        self._add_history("set_restfreqs", varlist)
1779
[1360]1780    def shift_refpix(self, delta):
[1846]1781        """\
[1589]1782        Shift the reference pixel of the Spectra Coordinate by an
1783        integer amount.
[1846]1784
[1589]1785        Parameters:
[1846]1786
[1589]1787            delta:   the amount to shift by
[1846]1788
1789        *Note*:
1790
[1589]1791            Be careful using this with broadband data.
[1846]1792
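        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # shift the reference pixel by +2 channels
            scan.shift_refpix(2)
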
[1360]1793        """
[1731]1794        Scantable.shift_refpix(self, delta)
[931]1795
[1862]1796    @asaplog_post_dec
[1259]1797    def history(self, filename=None):
[1846]1798        """\
[1259]1799        Print the history. Optionally to a file.
[1846]1800
[1348]1801        Parameters:
[1846]1802
[1928]1803            filename:    The name of the file to save the history to.
[1846]1804
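        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the file name is illustrative)::

            # page the history on screen
            scan.history()
            # write it to a text file as well
            scan.history('scantable_history.txt')
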
[1259]1805        """
[484]1806        hist = list(self._gethistory())
[794]1807        out = "-"*80
[484]1808        for h in hist:
[489]1809            if h.startswith("---"):
[1857]1810                out = "\n".join([out, h])
[489]1811            else:
1812                items = h.split("##")
1813                date = items[0]
1814                func = items[1]
1815                items = items[2:]
[794]1816                out += "\n"+date+"\n"
1817                out += "Function: %s\n  Parameters:" % (func)
[489]1818                for i in items:
[1938]1819                    if i == '':
1820                        continue
[489]1821                    s = i.split("=")
[1118]1822                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1823                out = "\n".join([out, "-"*80])
[1259]1824        if filename is not None:
1825            if filename == "":
1826                filename = 'scantable_history.txt'
1827            import os
1828            filename = os.path.expandvars(os.path.expanduser(filename))
1829            if not os.path.isdir(filename):
1830                data = open(filename, 'w')
1831                data.write(out)
1832                data.close()
1833            else:
1834                msg = "Illegal file name '%s'." % (filename)
[1859]1835                raise IOError(msg)
1836        return page(out)
[513]1837    #
1838    # Maths business
1839    #
[1862]1840    @asaplog_post_dec
[931]1841    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1842        """\
[1070]1843        Return the (time) weighted average of a scan.
[1846]1844
1845        *Note*:
1846
[1070]1847            in channels only - align if necessary
[1846]1848
[513]1849        Parameters:
[1846]1850
[513]1851            mask:     an optional mask (only used for 'var' and 'tsys'
1852                      weighting)
[1855]1853
[558]1854            scanav:   True averages each scan separately.
1855                      False (default) averages all scans together.
[1855]1856
[1099]1857            weight:   Weighting scheme.
1858                      'none'     (mean, no weight)
1859                      'var'      (1/var(spec) weighted)
1860                      'tsys'     (1/Tsys**2 weighted)
1861                      'tint'     (integration time weighted)
1862                      'tintsys'  (Tint/Tsys**2)
1863                      'median'   (median averaging)
[535]1864                      The default is 'tint'
[1855]1865
[931]1866            align:    align the spectra in velocity before averaging. It takes
1867                      the time of the first spectrum as reference time.
[1846]1868
1869        Example::
1870
[513]1871            # time average the scantable without using a mask
[710]1872            newscan = scan.average_time()
[1846]1873
[513]1874        """
1875        varlist = vars()
[1593]1876        weight = weight or 'TINT'
1877        mask = mask or ()
1878        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1879        scan = (self, )
[1859]1880
1881        if align:
1882            scan = (self.freq_align(insitu=False), )
1883        s = None
1884        if weight.upper() == 'MEDIAN':
1885            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1886                                                     scanav))
1887        else:
1888            s = scantable(self._math._average(scan, mask, weight.upper(),
1889                          scanav))
[1099]1890        s._add_history("average_time", varlist)
[513]1891        return s
[710]1892
[1862]1893    @asaplog_post_dec
[876]1894    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1895        """\
[513]1896        Return a scan where all spectra are converted to either
1897        Jansky or Kelvin depending upon the flux units of the scan table.
1898        By default the function tries to look the values up internally.
1899        If it can't find them (or if you want to over-ride), you must
1900        specify EITHER jyperk OR eta (and D which it will try to look up
1901        also if you don't set it). jyperk takes precedence if you set both.
[1846]1902
[513]1903        Parameters:
[1846]1904
[513]1905            jyperk:      the Jy / K conversion factor
[1855]1906
[513]1907            eta:         the aperture efficiency
[1855]1908
[1928]1909            d:           the geometric diameter (metres)
[1855]1910
[513]1911            insitu:      if False a new scantable is returned.
1912                         Otherwise, the scaling is done in-situ
1913                         The default is taken from .asaprc (False)
[1846]1914
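        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the numerical values are purely illustrative)::

            # apply a known Jy/K conversion factor
            ss = scan.convert_flux(jyperk=12.5, insitu=False)
            # or derive the factor from aperture efficiency and diameter
            ss = scan.convert_flux(eta=0.6, d=22.0, insitu=False)
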
[513]1915        """
1916        if insitu is None: insitu = rcParams['insitu']
[876]1917        self._math._setinsitu(insitu)
[513]1918        varlist = vars()
[1593]1919        jyperk = jyperk or -1.0
1920        d = d or -1.0
1921        eta = eta or -1.0
[876]1922        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1923        s._add_history("convert_flux", varlist)
1924        if insitu: self._assign(s)
1925        else: return s
[513]1926
[1862]1927    @asaplog_post_dec
[876]1928    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1929        """\
[513]1930        Return a scan after applying a gain-elevation correction.
1931        The correction can be made via either a polynomial or a
1932        table-based interpolation (and extrapolation if necessary).
1933        You specify polynomial coefficients, an ascii table or neither.
1934        If you specify neither, then a polynomial correction will be made
1935        with built in coefficients known for certain telescopes (an error
1936        will occur if the instrument is not known).
1937        The data and Tsys are *divided* by the scaling factors.
[1846]1938
[513]1939        Parameters:
[1846]1940
[513]1941            poly:        Polynomial coefficients (default None) to compute a
1942                         gain-elevation correction as a function of
1943                         elevation (in degrees).
[1855]1944
[513]1945            filename:    The name of an ascii file holding correction factors.
1946                         The first row of the ascii file must give the column
1947                         names and these MUST include columns
1948                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1949                         by this) somewhere.
1950                         The second row must give the data type of the
1951                         column. Use 'R' for Real and 'I' for Integer.
1952                         An example file would be
1953                         (actual factors are arbitrary) :
1954
1955                         TIME ELEVATION FACTOR
1956                         R R R
1957                         0.1 0 0.8
1958                         0.2 20 0.85
1959                         0.3 40 0.9
1960                         0.4 60 0.85
1961                         0.5 80 0.8
1962                         0.6 90 0.75
[1855]1963
[513]1964            method:      Interpolation method when correcting from a table.
1965                         Values are  "nearest", "linear" (default), "cubic"
1966                         and "spline"
[1855]1967
[513]1968            insitu:      if False a new scantable is returned.
1969                         Otherwise, the scaling is done in-situ
1970                         The default is taken from .asaprc (False)
[1846]1971
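        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable, and the coefficients and file name are illustrative)::

            # polynomial correction as a function of elevation (degrees)
            ss = scan.gain_el(poly=[0.7, 0.008, -0.00008], insitu=False)
            # or interpolate from an ascii table of the form shown above
            ss = scan.gain_el(filename='gain_curve.txt', method='linear',
                              insitu=False)
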
[513]1972        """
1973
1974        if insitu is None: insitu = rcParams['insitu']
[876]1975        self._math._setinsitu(insitu)
[513]1976        varlist = vars()
[1593]1977        poly = poly or ()
[513]1978        from os.path import expandvars
1979        filename = expandvars(filename)
[876]1980        s = scantable(self._math._gainel(self, poly, filename, method))
1981        s._add_history("gain_el", varlist)
[1593]1982        if insitu:
1983            self._assign(s)
1984        else:
1985            return s
[710]1986
[1862]1987    @asaplog_post_dec
[931]1988    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1989        """\
[513]1990        Return a scan where all rows have been aligned in frequency/velocity.
1991        The alignment frequency frame (e.g. LSRK) is that set by function
1992        set_freqframe.
[1846]1993
[513]1994        Parameters:
[1855]1995
[513]1996            reftime:     reference time to align at. By default, the time of
1997                         the first row of data is used.
[1855]1998
[513]1999            method:      Interpolation method for regridding the spectra.
2000                         Choose from "nearest", "linear", "cubic" (default)
2001                         and "spline"
[1855]2002
[513]2003            insitu:      if False a new scantable is returned.
2004                         Otherwise, the scaling is done in-situ
2005                         The default is taken from .asaprc (False)
[1846]2006
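        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # align all rows at the time of the first row
            ss = scan.freq_align(insitu=False)
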
[513]2007        """
[931]2008        if insitu is None: insitu = rcParams["insitu"]
[876]2009        self._math._setinsitu(insitu)
[513]2010        varlist = vars()
[1593]2011        reftime = reftime or ""
[931]2012        s = scantable(self._math._freq_align(self, reftime, method))
[876]2013        s._add_history("freq_align", varlist)
2014        if insitu: self._assign(s)
2015        else: return s
[513]2016
[1862]2017    @asaplog_post_dec
[1725]2018    def opacity(self, tau=None, insitu=None):
[1846]2019        """\
[513]2020        Apply an opacity correction. The data
2021        and Tsys are multiplied by the correction factor.
[1846]2022
[513]2023        Parameters:
[1855]2024
[1689]2025            tau:         (list of) opacity from which the correction factor is
[513]2026                         exp(tau*ZD)
[1689]2027                         where ZD is the zenith-distance.
2028                         If a list is provided, it has to be of length nIF,
2029                         nIF*nPol or 1 and in order of IF/POL, e.g.
2030                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2031                         if tau is `None` the opacities are determined from a
2032                         model.
[1855]2033
[513]2034            insitu:      if False a new scantable is returned.
2035                         Otherwise, the scaling is done in-situ
2036                         The default is taken from .asaprc (False)
[1846]2037
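        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the opacity value is purely illustrative)::

            # correct with a single zenith opacity for all IFs/polarisations
            ss = scan.opacity(tau=0.083, insitu=False)
            # or let the opacities be determined from a model
            ss = scan.opacity(insitu=False)
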
[513]2038        """
2039        if insitu is None: insitu = rcParams['insitu']
[876]2040        self._math._setinsitu(insitu)
[513]2041        varlist = vars()
[1689]2042        if not hasattr(tau, "__len__"):
2043            tau = [tau]
[876]2044        s = scantable(self._math._opacity(self, tau))
2045        s._add_history("opacity", varlist)
2046        if insitu: self._assign(s)
2047        else: return s
[513]2048
[1862]2049    @asaplog_post_dec
[513]2050    def bin(self, width=5, insitu=None):
[1846]2051        """\
[513]2052        Return a scan where all spectra have been binned up.
[1846]2053
[1348]2054        Parameters:
[1846]2055
[513]2056            width:       The bin width (default=5) in pixels
[1855]2057
[513]2058            insitu:      if False a new scantable is returned.
2059                         Otherwise, the scaling is done in-situ
2060                         The default is taken from .asaprc (False)
[1846]2061
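        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # bin up into 10-channel bins, returning a new scantable
            ss = scan.bin(width=10, insitu=False)
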
[513]2062        """
2063        if insitu is None: insitu = rcParams['insitu']
[876]2064        self._math._setinsitu(insitu)
[513]2065        varlist = vars()
[876]2066        s = scantable(self._math._bin(self, width))
[1118]2067        s._add_history("bin", varlist)
[1589]2068        if insitu:
2069            self._assign(s)
2070        else:
2071            return s
[513]2072
[1862]2073    @asaplog_post_dec
[513]2074    def resample(self, width=5, method='cubic', insitu=None):
[1846]2075        """\
[1348]2076        Return a scan where all spectra have been resampled (binned up using interpolation).
[1573]2077
[1348]2078        Parameters:
[1846]2079
[513]2080            width:       The bin width (default=5) in pixels
[1855]2081
[513]2082            method:      Interpolation method used for the resampling.
2083                         Values are  "nearest", "linear", "cubic" (default)
2084                         and "spline"
[1855]2085
[513]2086            insitu:      if False a new scantable is returned.
2087                         Otherwise, the scaling is done in-situ
2088                         The default is taken from .asaprc (False)
[1846]2089
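        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # resample onto 2-pixel wide channels using cubic interpolation
            ss = scan.resample(width=2, method='cubic', insitu=False)
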
[513]2090        """
2091        if insitu is None: insitu = rcParams['insitu']
[876]2092        self._math._setinsitu(insitu)
[513]2093        varlist = vars()
[876]2094        s = scantable(self._math._resample(self, method, width))
[1118]2095        s._add_history("resample", varlist)
[876]2096        if insitu: self._assign(s)
2097        else: return s
[513]2098
[1862]2099    @asaplog_post_dec
[946]2100    def average_pol(self, mask=None, weight='none'):
[1846]2101        """\
[946]2102        Average the Polarisations together.
[1846]2103
[946]2104        Parameters:
[1846]2105
[946]2106            mask:        An optional mask defining the region, where the
2107                         averaging will be applied. The output will have all
2108                         specified points masked.
[1855]2109
[946]2110            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2111                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2112
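        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable)::

            # unweighted average of the polarisations
            spol = scan.average_pol(weight='none')
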
[946]2113        """
2114        varlist = vars()
[1593]2115        mask = mask or ()
[1010]2116        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2117        s._add_history("average_pol", varlist)
[992]2118        return s
[513]2119
[1862]2120    @asaplog_post_dec
[1145]2121    def average_beam(self, mask=None, weight='none'):
[1846]2122        """\
[1145]2123        Average the Beams together.
[1846]2124
[1145]2125        Parameters:
2126            mask:        An optional mask defining the region, where the
2127                         averaging will be applied. The output will have all
2128                         specified points masked.
[1855]2129
[1145]2130            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2131                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2132
[1145]2133        """
2134        varlist = vars()
[1593]2135        mask = mask or ()
[1145]2136        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2137        s._add_history("average_beam", varlist)
2138        return s
2139
[1586]2140    def parallactify(self, pflag):
[1846]2141        """\
[1843]2142        Set a flag to indicate whether this data should be treated as having
[1617]2143        been 'parallactified' (total phase == 0.0)
[1846]2144
[1617]2145        Parameters:
[1855]2146
[1843]2147            pflag:  Bool indicating whether to turn this on (True) or
[1617]2148                    off (False)
[1846]2149
[1617]2150        """
[1586]2151        varlist = vars()
2152        self._parallactify(pflag)
2153        self._add_history("parallactify", varlist)
2154
[1862]2155    @asaplog_post_dec
[992]2156    def convert_pol(self, poltype=None):
[1846]2157        """\
[992]2158        Convert the data to a different polarisation type.
[1565]2159        Note that you will need cross-polarisation terms for most conversions.
[1846]2160
[992]2161        Parameters:
[1855]2162
[992]2163            poltype:    The new polarisation type. Valid types are:
[1565]2164                        "linear", "circular", "stokes" and "linpol"
[1846]2165
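        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable containing the required cross-polarisation terms)::

            stokes = scan.convert_pol('stokes')
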
[992]2166        """
2167        varlist = vars()
[1859]2168        s = scantable(self._math._convertpol(self, poltype))
[1118]2169        s._add_history("convert_pol", varlist)
[992]2170        return s
2171
[1862]2172    @asaplog_post_dec
[1819]2173    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
[1846]2174        """\
[513]2175        Smooth the spectrum by the specified kernel (conserving flux).
[1846]2176
[513]2177        Parameters:
[1846]2178
[513]2179            kernel:     The type of smoothing kernel. Select from
[1574]2180                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
2181                        or 'poly'
[1855]2182
[513]2183            width:      The width of the kernel in pixels. For hanning this is
2184                        ignored, otherwise it defaults to 5 pixels.
2185                        For 'gaussian' it is the Full Width Half
2186                        Maximum. For 'boxcar' it is the full width.
[1574]2187                        For 'rmedian' and 'poly' it is the half width.
[1855]2188
[1574]2189            order:      Optional parameter for 'poly' kernel (default is 2), to
2190                        specify the order of the polynomial. Ignored by all other
2191                        kernels.
[1855]2192
[1819]2193            plot:       plot the original and the smoothed spectra.
2194                        In this each individual fit has to be approved, by
2195                        typing 'y' or 'n'
[1855]2196
[513]2197            insitu:     if False a new scantable is returned.
2198                        Otherwise, the scaling is done in-situ
2199                        The default is taken from .asaprc (False)
[1846]2200
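        Example (a minimal sketch; 'scan' is assumed to be an existing
        scantable and the kernel parameters are purely illustrative)::

            # 5-channel boxcar smoothing, returning a new scantable
            ss = scan.smooth(kernel='boxcar', width=5, insitu=False)
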
[513]2201        """
2202        if insitu is None: insitu = rcParams['insitu']
[876]2203        self._math._setinsitu(insitu)
[513]2204        varlist = vars()
[1819]2205
2206        if plot: orgscan = self.copy()
2207
[1574]2208        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]2209        s._add_history("smooth", varlist)
[1819]2210
2211        if plot:
[2150]2212            from asap.asapplotter import new_asaplot
2213            theplot = new_asaplot(rcParams['plotter.gui'])
2214            theplot.set_panels()
[1819]2215            ylab=s._get_ordinate_label()
[2150]2216            #theplot.palette(0,["#777777","red"])
[1819]2217            for r in xrange(s.nrow()):
2218                xsm=s._getabcissa(r)
2219                ysm=s._getspectrum(r)
2220                xorg=orgscan._getabcissa(r)
2221                yorg=orgscan._getspectrum(r)
[2150]2222                theplot.clear()
2223                theplot.hold()
2224                theplot.set_axes('ylabel',ylab)
2225                theplot.set_axes('xlabel',s._getabcissalabel(r))
2226                theplot.set_axes('title',s._getsourcename(r))
2227                theplot.set_line(label='Original',color="#777777")
2228                theplot.plot(xorg,yorg)
2229                theplot.set_line(label='Smoothed',color="red")
2230                theplot.plot(xsm,ysm)
[1819]2231                ### Ugly part for legend
2232                for i in [0,1]:
[2150]2233                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
2234                theplot.release()
[1819]2235                ### Ugly part for legend
[2150]2236                theplot.subplots[0]['lines']=[]
[1819]2237                res = raw_input("Accept smoothing ([y]/n): ")
2238                if res.upper() == 'N':
2239                    s._setspectrum(yorg, r)
[2150]2240            theplot.quit()
2241            del theplot
[1819]2242            del orgscan
2243
[876]2244        if insitu: self._assign(s)
2245        else: return s
[513]2246
[2186]2247    @asaplog_post_dec
2248    def _parse_wn(self, wn):
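        """\
        Convert a wave number specification (an int, a list/tuple of ints,
        or a string range such as 'a-b', '<a', '>=a') into an explicit list
        of wave numbers. Used by *sinusoid_baseline() to expand the
        addwn/rejwn parameters.
        """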
2249        if isinstance(wn, list) or isinstance(wn, tuple):
2250            return wn
2251        elif isinstance(wn, int):
2252            return [ wn ]
2253        elif isinstance(wn, str):
2254            if '-' in wn:                            # case 'a-b' : return [a,a+1,...,b-1,b]
2255                val = wn.split('-')
2256                val = [int(val[0]), int(val[1])]
2257                val.sort()
2258                res = [i for i in xrange(val[0], val[1]+1)]
2259            elif wn[:2] == '<=' or wn[:2] == '=<':   # cases '<=a','=<a' : return [0,1,...,a-1,a]
2260                val = int(wn[2:])+1
2261                res = [i for i in xrange(val)]
2262            elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2263                val = int(wn[:-2])+1
2264                res = [i for i in xrange(val)]
2265            elif wn[0] == '<':                       # case '<a' :         return [0,1,...,a-2,a-1]
2266                val = int(wn[1:])
2267                res = [i for i in xrange(val)]
2268            elif wn[-1] == '>':                      # case 'a>' :         return [0,1,...,a-2,a-1]
2269                val = int(wn[:-1])
2270                res = [i for i in xrange(val)]
2271            elif wn[:2] == '>=' or wn[:2] == '=>':   # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2272                val = int(wn[2:])
2273                res = [i for i in xrange(val, self.nchan()/2+1)]
2274            elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2275                val = int(wn[:-2])
2276                res = [i for i in xrange(val, self.nchan()/2+1)]
2277            elif wn[0] == '>':                       # case '>a' :         return [a+1,a+2,...,a_nyq]
2278                val = int(wn[1:])+1
2279                res = [i for i in xrange(val, self.nchan()/2+1)]
2280            elif wn[-1] == '<':                      # case 'a<' :         return [a+1,a+2,...,a_nyq]
2281                val = int(wn[:-1])+1
2282                res = [i for i in xrange(val, self.nchan()/2+1)]
[2012]2283
[2186]2284            return res
2285        else:
2286            msg = 'wrong value given for addwn/rejwn'
2287            raise RuntimeError(msg)
2288
2289
[1862]2290    @asaplog_post_dec
[2186]2291    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2292                          addwn=None, rejwn=None, clipthresh=None, clipniter=None, plot=None,
2293                          getresidual=None, outlog=None, blfile=None):
[2047]2294        """\
[2094]2295        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2296        Parameters:
[2186]2297            insitu:        if False a new scantable is returned.
[2081]2298                           Otherwise, the scaling is done in-situ
2299                           The default is taken from .asaprc (False)
[2186]2300            mask:          an optional mask
2301            applyfft:      if True use some method, such as FFT, to find
2302                           strongest sinusoidal components in the wavenumber
2303                           domain to be used for baseline fitting.
2304                           default is True.
2305            fftmethod:     method to find the strong sinusoidal components.
2306                           now only 'fft' is available and it is the default.
2307            fftthresh:     the threshold to select wave numbers to be used for
2308                           fitting from the distribution of amplitudes in the
2309                           wavenumber domain.
2310                           both float and string values accepted.
2311                           given a float value, the unit is set to sigma.
2312                           for string values, allowed formats include:
2313                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2314                               'topx' (= the x strongest ones, e.g. 'top5').
2315                           default is 3.0 (unit: sigma).
2316            addwn:         the additional wave numbers to be used for fitting.
2317                           list or integer value is accepted to specify every
2318                           wave numbers. also string value can be used in case
2319                           you need to specify wave numbers in a certain range,
2320                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2321                                 '<a'  (= 0,1,...,a-2,a-1),
2322                                 '>=a' (= a, a+1, ... up to the maximum wave
2323                                        number corresponding to the Nyquist
2324                                        frequency for the case of FFT).
2325                           default is [].
2326            rejwn:         the wave numbers NOT to be used for fitting.
2327                           can be set just as addwn but has higher priority:
2328                           wave numbers which are specified both in addwn
2329                           and rejwn will NOT be used. default is [].
[2081]2330            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
[2129]2331            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2081]2332            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2333                           plot the fit and the residual. In this each
2334                           individual fit has to be approved, by typing 'y'
2335                           or 'n'
2336            getresidual:   if False, returns best-fit values instead of
2337                           residual. (default is True)
2338            outlog:        Output the coefficients of the best-fit
2339                           function to logger (default is False)
2340            blfile:        Name of a text file in which the best-fit
2341                           parameter values to be written
[2186]2342                           (default is '': no file/logger output)
[2047]2343
2344        Example:
2345            # return a scan baselined by a combination of sinusoidal curves having
[2081]2346            # wave numbers in spectral window up to 10,
[2047]2347            # also with 3-sigma clipping, iteration up to 4 times
[2186]2348            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2349
2350        Note:
2351            The best-fit parameter values output in logger and/or blfile are now
2352            based on specunit of 'channel'.
[2047]2353        """
2354       
[2186]2355        try:
2356            varlist = vars()
[2047]2357       
[2186]2358            if insitu is None: insitu = rcParams['insitu']
2359            if insitu:
2360                workscan = self
2361            else:
2362                workscan = self.copy()
2363           
2364            if mask          is None: mask          = [True for i in xrange(workscan.nchan())]
2365            if applyfft      is None: applyfft      = True
2366            if fftmethod     is None: fftmethod     = 'fft'
2367            if fftthresh     is None: fftthresh     = 3.0
2368            if addwn         is None: addwn         = []
2369            if rejwn         is None: rejwn         = []
2370            if clipthresh    is None: clipthresh    = 3.0
2371            if clipniter     is None: clipniter     = 0
2372            if plot          is None: plot          = False
2373            if getresidual   is None: getresidual   = True
2374            if outlog        is None: outlog        = False
2375            if blfile        is None: blfile        = ''
[2047]2376
[2081]2377            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2186]2378            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                                        str(fftthresh).lower(),
                                        workscan._parse_wn(addwn),
                                        workscan._parse_wn(rejwn),
                                        clipthresh, clipniter,
                                        getresidual, outlog, blfile)
2379            workscan._add_history('sinusoid_baseline', varlist)
[2047]2380           
2381            if insitu:
2382                self._assign(workscan)
2383            else:
2384                return workscan
2385           
2386        except RuntimeError, e:
[2186]2387            raise_fitting_failure_exception(e)
[2047]2388
2389
[2186]2390    @asaplog_post_dec
2391    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2392                               addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
[2081]2393                               chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
[2047]2394        """\
[2094]2395        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2396        Spectral lines are detected first using linefinder and masked out
2397        to avoid them affecting the baseline solution.
2398
2399        Parameters:
[2081]2400            insitu:        if False a new scantable is returned.
2401                           Otherwise, the scaling is done in-situ
2402                           The default is taken from .asaprc (False)
2403            mask:          an optional mask retrieved from scantable
[2186]2404            applyfft:      if True use some method, such as FFT, to find
2405                           strongest sinusoidal components in the wavenumber
2406                           domain to be used for baseline fitting.
2407                           default is True.
2408            fftmethod:     method to find the strong sinusoidal components.
2409                           now only 'fft' is available and it is the default.
2410            fftthresh:     the threshold to select wave numbers to be used for
2411                           fitting from the distribution of amplitudes in the
2412                           wavenumber domain.
2413                           both float and string values accepted.
2414                           given a float value, the unit is set to sigma.
2415                           for string values, allowed formats include:
2416                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2417                               'topx' (= the x strongest ones, e.g. 'top5').
2418                           default is 3.0 (unit: sigma).
2419            addwn:         the additional wave numbers to be used for fitting.
2420                           list or integer value is accepted to specify every
2421                           wave numbers. also string value can be used in case
2422                           you need to specify wave numbers in a certain range,
2423                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2424                                 '<a'  (= 0,1,...,a-2,a-1),
2425                                 '>=a' (= a, a+1, ... up to the maximum wave
2426                                        number corresponding to the Nyquist
2427                                        frequency for the case of FFT).
2428                           default is [].
2429            rejwn:         the wave numbers NOT to be used for fitting.
2430                           can be set just as addwn but has higher priority:
2431                           wave numbers which are specified both in addwn
2432                           and rejwn will NOT be used. default is [].
[2081]2433            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
[2129]2434            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2081]2435            edge:          an optional number of channels to drop at
2436                           the edge of the spectrum. If only one value is
2437                           specified, the same number will be dropped
2438                           from both sides of the spectrum. Default
2439                           is to keep all channels. Nested tuples
2440                           represent individual edge selection for
2441                           different IFs (a number of spectral channels
2442                           can be different)
2443            threshold:     the threshold used by line finder. It is
2444                           better to keep it large as only strong lines
2445                           affect the baseline solution.
2446            chan_avg_limit:a maximum number of consecutive spectral
2447                           channels to average during the search of
2448                           weak and broad lines. The default is no
2449                           averaging (and no search for weak lines).
2450                           If such lines can affect the fitted baseline
2451                           (e.g. a high order polynomial is fitted),
2452                           increase this parameter (usually values up
2453                           to 8 are reasonable). Most users of this
2454                           method should find the default value sufficient.
2455            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2456                           plot the fit and the residual. In this each
2457                           individual fit has to be approved, by typing 'y'
2458                           or 'n'
2459            getresidual:   if False, returns best-fit values instead of
2460                           residual. (default is True)
2461            outlog:        Output the coefficients of the best-fit
2462                           function to logger (default is False)
2463            blfile:        Name of a text file in which the best-fit
2464                           parameter values to be written
2465                           (default is "": no file/logger output)
[2047]2466
2467        Example:
[2186]2468            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2469       
2470        Note:
2471            The best-fit parameter values output in logger and/or blfile are now
2472            based on specunit of 'channel'.
[2047]2473        """
2474
[2186]2475        try:
2476            varlist = vars()
[2047]2477
[2186]2478            if insitu is None: insitu = rcParams['insitu']
2479            if insitu:
2480                workscan = self
[2047]2481            else:
[2186]2482                workscan = self.copy()
2483           
2484            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2485            if applyfft       is None: applyfft       = True
2486            if fftmethod      is None: fftmethod      = 'fft'
2487            if fftthresh      is None: fftthresh      = 3.0
2488            if addwn          is None: addwn          = []
2489            if rejwn          is None: rejwn          = []
2490            if clipthresh     is None: clipthresh     = 3.0
2491            if clipniter      is None: clipniter      = 0
2492            if edge           is None: edge           = (0,0)
2493            if threshold      is None: threshold      = 3
2494            if chan_avg_limit is None: chan_avg_limit = 1
2495            if plot           is None: plot           = False
2496            if getresidual    is None: getresidual    = True
2497            if outlog         is None: outlog         = False
2498            if blfile         is None: blfile         = ''
[2047]2499
[2081]2500            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2186]2501            workscan._auto_sinusoid_baseline(mask, applyfft,
                                             fftmethod.lower(),
                                             str(fftthresh).lower(),
                                             workscan._parse_wn(addwn),
                                             workscan._parse_wn(rejwn),
                                             clipthresh, clipniter,
                                             normalise_edge_param(edge),
                                             threshold, chan_avg_limit,
                                             getresidual, outlog, blfile)
[2047]2502            workscan._add_history("auto_sinusoid_baseline", varlist)
2503           
2504            if insitu:
2505                self._assign(workscan)
2506            else:
2507                return workscan
2508           
2509        except RuntimeError, e:
[2186]2510            raise_fitting_failure_exception(e)
[2047]2511
2512    @asaplog_post_dec
[2094]2513    def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2514                         clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
[1846]2515        """\
[2012]2516        Return a scan which has been baselined (all rows) by a cubic spline function (piecewise cubic polynomial).
[513]2517        Parameters:
[2012]2518            insitu:     If False a new scantable is returned.
2519                        Otherwise, the scaling is done in-situ
2520                        The default is taken from .asaprc (False)
2521            mask:       An optional mask
2522            npiece:     Number of pieces. (default is 2)
2523            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2524            clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2012]2525            plot:   *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2526                        plot the fit and the residual. In this each
2527                        individual fit has to be approved, by typing 'y'
2528                        or 'n'
[2094]2529            getresidual:if False, returns best-fit values instead of
2530                        residual. (default is True)
[2012]2531            outlog:     Output the coefficients of the best-fit
2532                        function to logger (default is False)
2533            blfile:     Name of a text file in which the best-fit
2534                        parameter values to be written
2535                        (default is "": no file/logger output)
[1846]2536
[2012]2537        Example:
2538            # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2539            # also with 3-sigma clipping, iteration up to 4 times
2540            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2541       
2542        Note:
2543            The best-fit parameter values output in logger and/or blfile are now
2544            based on specunit of 'channel'.
[2012]2545        """
2546       
[2186]2547        try:
2548            varlist = vars()
2549           
2550            if insitu is None: insitu = rcParams['insitu']
2551            if insitu:
2552                workscan = self
2553            else:
2554                workscan = self.copy()
[1855]2555
[2186]2556            if mask        is None: mask        = [True for i in xrange(workscan.nchan())]
2557            if npiece      is None: npiece      = 2
2558            if clipthresh  is None: clipthresh  = 3.0
2559            if clipniter   is None: clipniter   = 0
2560            if plot        is None: plot        = False
2561            if getresidual is None: getresidual = True
2562            if outlog      is None: outlog      = False
2563            if blfile      is None: blfile      = ''
[1855]2564
[2012]2565            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2094]2566            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, outlog, blfile)
[2012]2567            workscan._add_history("cspline_baseline", varlist)
2568           
2569            if insitu:
2570                self._assign(workscan)
2571            else:
2572                return workscan
2573           
2574        except RuntimeError, e:
[2186]2575            raise_fitting_failure_exception(e)
[1855]2576
[2186]2577    @asaplog_post_dec
[2012]2578    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None,
2579                              clipniter=None, edge=None, threshold=None,
[2094]2580                              chan_avg_limit=None, getresidual=None, plot=None, outlog=None, blfile=None):
[2012]2581        """\
2582        Return a scan which has been baselined (all rows) by a cubic spline
2583        function (piecewise cubic polynomial).
2584        Spectral lines are detected first using linefinder and masked out
2585        to avoid them affecting the baseline solution.
2586
2587        Parameters:
[794]2588            insitu:     if False a new scantable is returned.
2589                        Otherwise, the fitting is done in-situ
2590                        The default is taken from .asaprc (False)
[2012]2591            mask:       an optional mask retrieved from scantable
2592            npiece:     Number of pieces. (default is 2)
2593            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
[2129]2594            clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2012]2595            edge:       an optional number of channels to drop at
2596                        the edge of the spectrum. If only one value is
2597                        specified, the same number will be dropped
2598                        from both sides of the spectrum. Default
2599                        is to keep all channels. Nested tuples
2600                        represent individual edge selections for
2601                        different IFs (the number of spectral channels
2602                        can be different)
2603            threshold:  the threshold used by line finder. It is
2604                        better to keep it large as only strong lines
2605                        affect the baseline solution.
2606            chan_avg_limit:
2607                        a maximum number of consecutive spectral
2608                        channels to average during the search for
2609                        weak and broad lines. The default is no
2610                        averaging (and no search for weak lines).
2611                        If such lines can affect the fitted baseline
2612                        (e.g. a high order polynomial is fitted),
2613                        increase this parameter (usually values up
2614                        to 8 are reasonable). Most users of this
2615                        method should find the default value sufficient.
2616            plot:   *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2617                        plot the fit and the residual. In this mode each
2618                        individual fit has to be approved by typing 'y'
2619                        or 'n'
[2094]2620            getresidual: if False, returns best-fit values instead of
2621                        residual. (default is True)
[2012]2622            outlog:     Output the coefficients of the best-fit
2623                        function to logger (default is False)
2624            blfile:     Name of a text file in which the best-fit
2625                        parameter values are to be written
2626                        (default is "": no file/logger output)
[1846]2627
[1907]2628        Example:
[2012]2629            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2630       
2631        Note:
2632            The best-fit parameter values output in logger and/or blfile are now
2633            based on specunit of 'channel'.
[2012]2634        """
[1846]2635
[2186]2636        try:
2637            varlist = vars()
[2012]2638
[2186]2639            if insitu is None: insitu = rcParams['insitu']
2640            if insitu:
2641                workscan = self
[1391]2642            else:
[2186]2643                workscan = self.copy()
2644           
2645            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2646            if npiece         is None: npiece         = 2
2647            if clipthresh     is None: clipthresh     = 3.0
2648            if clipniter      is None: clipniter      = 0
2649            if edge           is None: edge           = (0, 0)
2650            if threshold      is None: threshold      = 3
2651            if chan_avg_limit is None: chan_avg_limit = 1
2652            if plot           is None: plot           = False
2653            if getresidual    is None: getresidual    = True
2654            if outlog         is None: outlog         = False
2655            if blfile         is None: blfile         = ''
[1819]2656
[2186]2657            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2658            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
                                            clipniter,
                                            normalise_edge_param(edge),
                                            threshold, chan_avg_limit,
                                            getresidual, outlog, blfile)
[2012]2659            workscan._add_history("auto_cspline_baseline", varlist)
[1907]2660           
[1856]2661            if insitu:
2662                self._assign(workscan)
2663            else:
2664                return workscan
[2012]2665           
2666        except RuntimeError, e:
[2186]2667            raise_fitting_failure_exception(e)
[513]2668
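    # A sketch of the per-IF 'edge' selection described above, assuming a
    # scantable 'scan' with two IFs; the channel counts are illustrative:
    #
    #     # drop 100 edge channels for the first IF and 20 for the second
    #     bscan = scan.auto_cspline_baseline(npiece=3, insitu=False,
    #                                        edge=((100, 100), (20, 20)))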
[1931]2669    @asaplog_post_dec
[2094]2670    def poly_baseline(self, insitu=None, mask=None, order=None, plot=None, getresidual=None, outlog=None, blfile=None):
[1907]2671        """\
2672        Return a scan which has been baselined (all rows) by a polynomial.
2673        Parameters:
[2012]2674            insitu:     if False a new scantable is returned.
2675                        Otherwise, the fitting is done in-situ
2676                        The default is taken from .asaprc (False)
[1907]2677            mask:       an optional mask
2678            order:      the order of the polynomial (default is 0)
2679            plot:       plot the fit and the residual. In this mode each
2680                        individual fit has to be approved by typing 'y'
[2012]2681                        or 'n'
[2094]2682            getresidual: if False, returns best-fit values instead of
2683                        residual. (default is True)
[2012]2684            outlog:     Output the coefficients of the best-fit
2685                        function to logger (default is False)
2686            blfile:     Name of a text file in which the best-fit
2687                        parameter values are to be written
2688                        (default is "": no file/logger output)
2689
[1907]2690        Example:
2691            # return a scan baselined by a third order polynomial,
2692            # not using a mask
2693            bscan = scan.poly_baseline(order=3)
2694        """
[1931]2695       
[2186]2696        try:
2697            varlist = vars()
[1931]2698       
[2186]2699            if insitu is None: insitu = rcParams["insitu"]
2700            if insitu:
2701                workscan = self
2702            else:
2703                workscan = self.copy()
[1907]2704
[2186]2705            if mask        is None: mask        = [True for i in xrange(workscan.nchan())]
2706            if order       is None: order       = 0
2707            if plot        is None: plot        = False
2708            if getresidual is None: getresidual = True
2709            if outlog      is None: outlog      = False
2710            if blfile      is None: blfile      = ""
[1907]2711
[2012]2712            if plot:
[2186]2713                outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
[2012]2714                if outblfile: blf = open(blfile, "a")
2715               
[1907]2716                f = fitter()
2717                f.set_function(lpoly=order)
[2186]2718               
2719                rows = xrange(workscan.nrow())
2720                #if len(rows) > 0: workscan._init_blinfo()
2721               
[1907]2722                for r in rows:
2723                    f.x = workscan._getabcissa(r)
2724                    f.y = workscan._getspectrum(r)
2725                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2726                    f.data = None
2727                    f.fit()
2728                   
2729                    f.plot(residual=True)
2730                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2731                    if accept_fit.upper() == "N":
[2012]2732                        #workscan._append_blinfo(None, None, None)
[1907]2733                        continue
[2012]2734                   
2735                    blpars = f.get_parameters()
2736                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2737                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2094]2738                    workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
[1907]2739                   
[2012]2740                    if outblfile:
2741                        rms = workscan.get_rms(f.mask, r)
2742                        dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2743                        blf.write(dataout)
2744
[1907]2745                f._p.unmap()
2746                f._p = None
[2012]2747
2748                if outblfile: blf.close()
[1907]2749            else:
[2094]2750                workscan._poly_baseline(mask, order, getresidual, outlog, blfile)
[1907]2751           
2752            workscan._add_history("poly_baseline", varlist)
2753           
2754            if insitu:
2755                self._assign(workscan)
2756            else:
2757                return workscan
2758           
[1919]2759        except RuntimeError, e:
[2186]2760            raise_fitting_failure_exception(e)
[1907]2761
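    # A sketch of recording the best-fit parameters from poly_baseline(),
    # assuming 'scan' is a scantable; 'blparams.txt' is an illustrative
    # file name:
    #
    #     bscan = scan.poly_baseline(order=1, insitu=False, outlog=True,
    #                                blfile='blparams.txt')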
[2186]2762    @asaplog_post_dec
[2012]2763    def auto_poly_baseline(self, insitu=None, mask=None, order=None, edge=None, threshold=None,
[2094]2764                           chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
[1846]2765        """\
[1931]2766        Return a scan which has been baselined (all rows) by a polynomial.
[880]2767        Spectral lines are detected first using linefinder and masked out
2768        to avoid them affecting the baseline solution.
2769
2770        Parameters:
[2012]2771            insitu:     if False a new scantable is returned.
2772                        Otherwise, the fitting is done in-situ
2773                        The default is taken from .asaprc (False)
[880]2774            mask:       an optional mask retrieved from scantable
2775            order:      the order of the polynomial (default is 0)
[2012]2776            edge:       an optional number of channels to drop at
2777                        the edge of the spectrum. If only one value is
2778                        specified, the same number will be dropped
2779                        from both sides of the spectrum. Default
2780                        is to keep all channels. Nested tuples
2781                        represent individual edge selections for
2782                        different IFs (the number of spectral channels
2783                        can be different)
2784            threshold:  the threshold used by line finder. It is
2785                        better to keep it large as only strong lines
2786                        affect the baseline solution.
[1280]2787            chan_avg_limit:
[2012]2788                        a maximum number of consecutive spectral
2789                        channels to average during the search for
2790                        weak and broad lines. The default is no
2791                        averaging (and no search for weak lines).
2792                        If such lines can affect the fitted baseline
2793                        (e.g. a high order polynomial is fitted),
2794                        increase this parameter (usually values up
2795                        to 8 are reasonable). Most users of this
2796                        method should find the default value sufficient.
[1061]2797            plot:       plot the fit and the residual. In this mode each
2798                        individual fit has to be approved by typing 'y'
2799                        or 'n'
[2094]2800            getresidual: if False, returns best-fit values instead of
2801                        residual. (default is True)
[2012]2802            outlog:     Output the coefficients of the best-fit
2803                        function to logger (default is False)
2804            blfile:     Name of a text file in which the best-fit
2805                        parameter values are to be written
2806                        (default is "": no file/logger output)
[1846]2807
[2012]2808        Example:
2809            bscan = scan.auto_poly_baseline(order=7, insitu=False)
2810        """
[880]2811
[2186]2812        try:
2813            varlist = vars()
[1846]2814
[2186]2815            if insitu is None: insitu = rcParams['insitu']
2816            if insitu:
2817                workscan = self
2818            else:
2819                workscan = self.copy()
[1846]2820
[2186]2821            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2822            if order          is None: order          = 0
2823            if edge           is None: edge           = (0, 0)
2824            if threshold      is None: threshold      = 3
2825            if chan_avg_limit is None: chan_avg_limit = 1
2826            if plot           is None: plot           = False
2827            if getresidual    is None: getresidual    = True
2828            if outlog         is None: outlog         = False
2829            if blfile         is None: blfile         = ''
[1846]2830
[2186]2831            edge = normalise_edge_param(edge)
[880]2832
[2012]2833            if plot:
[2186]2834                outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
[2012]2835                if outblfile: blf = open(blfile, "a")
2836               
[2186]2837                from asap.asaplinefind import linefinder
[2012]2838                fl = linefinder()
2839                fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2840                fl.set_scan(workscan)
[2186]2841               
[2012]2842                f = fitter()
2843                f.set_function(lpoly=order)
[880]2844
[2186]2845                rows = xrange(workscan.nrow())
2846                #if len(rows) > 0: workscan._init_blinfo()
2847               
[2012]2848                for r in rows:
[2186]2849                    idx = 2*workscan.getif(r)
2850                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)), edge[idx:idx+2])  # (CAS-1434)
[907]2851
[2012]2852                    f.x = workscan._getabcissa(r)
2853                    f.y = workscan._getspectrum(r)
2854                    f.mask = fl.get_mask()
2855                    f.data = None
2856                    f.fit()
2857
2858                    f.plot(residual=True)
2859                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2860                    if accept_fit.upper() == "N":
2861                        #workscan._append_blinfo(None, None, None)
2862                        continue
2863
2864                    blpars = f.get_parameters()
2865                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2866                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2094]2867                    workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
[2012]2868
2869                    if outblfile:
2870                        rms = workscan.get_rms(f.mask, r)
2871                        dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2872                        blf.write(dataout)
2873                   
2874                f._p.unmap()
2875                f._p = None
2876
2877                if outblfile: blf.close()
2878            else:
[2186]2879                workscan._auto_poly_baseline(mask, order, edge, threshold, chan_avg_limit, getresidual, outlog, blfile)
[2012]2880
2881            workscan._add_history("auto_poly_baseline", varlist)
2882           
2883            if insitu:
2884                self._assign(workscan)
2885            else:
2886                return workscan
2887           
2888        except RuntimeError, e:
[2186]2889            raise_fitting_failure_exception(e)
[2012]2890
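    # A sketch of tuning the line detection in auto_poly_baseline(),
    # assuming 'scan' is a scantable; the values are illustrative:
    #
    #     # raise the detection threshold and average up to 4 consecutive
    #     # channels when searching for weak, broad lines
    #     bscan = scan.auto_poly_baseline(order=2, threshold=5,
    #                                     chan_avg_limit=4, insitu=False)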
2891    def _init_blinfo(self):
2892        """\
2893        Initialise the following three auxiliary members:
2894           blpars : parameters of the best-fit baseline,
2895           masklists : mask data (edge positions of masked channels) and
2896           actualmask : mask data (in boolean list),
2897        to keep for use later (including output to logger/text files).
2898        Used by poly_baseline() and auto_poly_baseline() in case of
2899        'plot=True'.
2900        """
2901        self.blpars = []
2902        self.masklists = []
2903        self.actualmask = []
2904        return
[880]2905
[2012]2906    def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2907        """\
2908        Append baseline-fitting related info to blpars, masklists and
2909        actualmask.
2910        """
2911        self.blpars.append(data_blpars)
2912        self.masklists.append(data_masklists)
2913        self.actualmask.append(data_actualmask)
2914        return
2915       
[1862]2916    @asaplog_post_dec
[914]2917    def rotate_linpolphase(self, angle):
[1846]2918        """\
[914]2919        Rotate the phase of the complex polarization O=Q+iU correlation.
2920        This is always done in situ in the raw data.  So if you call this
2921        function more than once then each call rotates the phase further.
[1846]2922
[914]2923        Parameters:
[1846]2924
[914]2925            angle:   The angle (degrees) to rotate (add) by.
[1846]2926
2927        Example::
2928
[914]2929            scan.rotate_linpolphase(2.3)
[1846]2930
[914]2931        """
2932        varlist = vars()
[936]2933        self._math._rotate_linpolphase(self, angle)
[914]2934        self._add_history("rotate_linpolphase", varlist)
2935        return
[710]2936
[1862]2937    @asaplog_post_dec
[914]2938    def rotate_xyphase(self, angle):
[1846]2939        """\
[914]2940        Rotate the phase of the XY correlation.  This is always done in situ
2941        in the data.  So if you call this function more than once
2942        then each call rotates the phase further.
[1846]2943
[914]2944        Parameters:
[1846]2945
[914]2946            angle:   The angle (degrees) to rotate (add) by.
[1846]2947
2948        Example::
2949
[914]2950            scan.rotate_xyphase(2.3)
[1846]2951
[914]2952        """
2953        varlist = vars()
[936]2954        self._math._rotate_xyphase(self, angle)
[914]2955        self._add_history("rotate_xyphase", varlist)
2956        return
2957
[1862]2958    @asaplog_post_dec
[914]2959    def swap_linears(self):
[1846]2960        """\
[1573]2961        Swap the linear polarisations XX and YY, or better the first two
[1348]2962        polarisations, as this also works for circulars.
[914]2963        """
2964        varlist = vars()
[936]2965        self._math._swap_linears(self)
[914]2966        self._add_history("swap_linears", varlist)
2967        return
2968
[1862]2969    @asaplog_post_dec
[914]2970    def invert_phase(self):
[1846]2971        """\
[914]2972        Invert the phase of the complex polarisation
2973        """
2974        varlist = vars()
[936]2975        self._math._invert_phase(self)
[914]2976        self._add_history("invert_phase", varlist)
2977        return
2978
[1862]2979    @asaplog_post_dec
[876]2980    def add(self, offset, insitu=None):
[1846]2981        """\
[513]2982        Return a scan where all spectra have the offset added
[1846]2983
[513]2984        Parameters:
[1846]2985
[513]2986            offset:      the offset
[1855]2987
[513]2988            insitu:      if False a new scantable is returned.
2989                         Otherwise, the scaling is done in-situ
2990                         The default is taken from .asaprc (False)
[1846]2991
[513]2992        """
2993        if insitu is None: insitu = rcParams['insitu']
[876]2994        self._math._setinsitu(insitu)
[513]2995        varlist = vars()
[876]2996        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2997        s._add_history("add", varlist)
[876]2998        if insitu:
2999            self._assign(s)
3000        else:
[513]3001            return s
3002
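    # A minimal sketch of add(), assuming 'scan' is a scantable:
    #
    #     shifted = scan.add(1.5, insitu=False)   # add a 1.5 offset to all spectra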
[1862]3003    @asaplog_post_dec
[1308]3004    def scale(self, factor, tsys=True, insitu=None):
[1846]3005        """\
3006
[1938]3007        Return a scan where all spectra are scaled by the given 'factor'
[1846]3008
[513]3009        Parameters:
[1846]3010
[1819]3011            factor:      the scaling factor (float or 1D float list)
[1855]3012
[513]3013            insitu:      if False a new scantable is returned.
3014                         Otherwise, the scaling is done in-situ
3015                         The default is taken from .asaprc (False)
[1855]3016
[513]3017            tsys:        if True (default) then apply the operation to Tsys
3018                         as well as the data
[1846]3019
[513]3020        """
3021        if insitu is None: insitu = rcParams['insitu']
[876]3022        self._math._setinsitu(insitu)
[513]3023        varlist = vars()
[1819]3024        s = None
3025        import numpy
3026        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3027            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3028                from asapmath import _array2dOp
3029                s = _array2dOp( self.copy(), factor, "MUL", tsys )
3030            else:
3031                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3032        else:
3033            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]3034        s._add_history("scale", varlist)
[876]3035        if insitu:
3036            self._assign(s)
3037        else:
[513]3038            return s
3039
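    # A minimal sketch of scale(), assuming 'scan' is a scantable; the
    # factors are illustrative:
    #
    #     doubled = scan.scale(2.0, tsys=False, insitu=False)  # data only
    #     scan.scale(1.2, tsys=True, insitu=True)              # data and Tsys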
[1504]3040    def set_sourcetype(self, match, matchtype="pattern",
3041                       sourcetype="reference"):
[1846]3042        """\
[1502]3043        Set the type of the source to be a source or reference scan
[1846]3044        using the provided pattern.
3045
[1502]3046        Parameters:
[1846]3047
[1504]3048            match:          a Unix style pattern, regular expression or selector
[1855]3049
[1504]3050            matchtype:      'pattern' (default) UNIX style pattern or
3051                            'regex' regular expression
[1855]3052
[1502]3053            sourcetype:     the type of the source to use (source/reference)
[1846]3054
[1502]3055        """
3056        varlist = vars()
3057        basesel = self.get_selection()
3058        stype = -1
3059        if sourcetype.lower().startswith("r"):
3060            stype = 1
3061        elif sourcetype.lower().startswith("s"):
3062            stype = 0
[1504]3063        else:
[1502]3064            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3065        if matchtype.lower().startswith("p"):
3066            matchtype = "pattern"
3067        elif matchtype.lower().startswith("r"):
3068            matchtype = "regex"
3069        else:
3070            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3071        sel = selector()
3072        if isinstance(match, selector):
3073            sel = match
3074        else:
[1504]3075            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3076        self.set_selection(basesel+sel)
3077        self._setsourcetype(stype)
3078        self.set_selection(basesel)
[1573]3079        self._add_history("set_sourcetype", varlist)
[1502]3080
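    # A sketch of set_sourcetype() with a UNIX-style pattern, assuming the
    # reference scans carry the trailing '_R' naming convention:
    #
    #     scan.set_sourcetype('*_R', matchtype='pattern',
    #                         sourcetype='reference')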
[1862]3081    @asaplog_post_dec
[1857]3082    @preserve_selection
[1819]3083    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3084        """\
[670]3085        This function allows one to build quotients automatically.
[1819]3086        It assumes the observation has the same number of
[670]3087        "ons" and "offs".
[1846]3088
[670]3089        Parameters:
[1846]3090
[710]3091            preserve:       you can preserve (default) the continuum or
3092                            remove it.  The equations used are
[1857]3093
[670]3094                            preserve: Output = Toff * (on/off) - Toff
[1857]3095
[1070]3096                            remove:   Output = Toff * (on/off) - Ton
[1855]3097
[1573]3098            mode:           the on/off detection mode
[1348]3099                            'paired' (default)
3100                            identifies 'off' scans by the
3101                            trailing '_R' (Mopra/Parkes) or
3102                            '_e'/'_w' (Tid) and matches
3103                            on/off pairs from the observing pattern
[1502]3104                            'time'
3105                            finds the closest off in time
[1348]3106
[1857]3107        .. todo:: verify argument is not implemented
3108
[670]3109        """
[1857]3110        varlist = vars()
[1348]3111        modes = ["time", "paired"]
[670]3112        if not mode in modes:
[876]3113            msg = "please provide valid mode. Valid modes are %s" % (modes)
3114            raise ValueError(msg)
[1348]3115        s = None
3116        if mode.lower() == "paired":
[1857]3117            sel = self.get_selection()
[1875]3118            sel.set_query("SRCTYPE==psoff")
[1356]3119            self.set_selection(sel)
[1348]3120            offs = self.copy()
[1875]3121            sel.set_query("SRCTYPE==pson")
[1356]3122            self.set_selection(sel)
[1348]3123            ons = self.copy()
3124            s = scantable(self._math._quotient(ons, offs, preserve))
3125        elif mode.lower() == "time":
3126            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3127        s._add_history("auto_quotient", varlist)
[876]3128        return s
[710]3129
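    # A minimal sketch of auto_quotient(), assuming 'scan' contains matched
    # on/off pairs (e.g. off scans tagged with a trailing '_R'):
    #
    #     q = scan.auto_quotient(preserve=True, mode='paired')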
[1862]3130    @asaplog_post_dec
[1145]3131    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3132        """\
[1143]3133        Form a quotient using "off" beams when observing in "MX" mode.
[1846]3134
[1143]3135        Parameters:
[1846]3136
[1145]3137            mask:           an optional mask to be used when weight == 'stddev'
[1855]3138
[1143]3139            weight:         How to average the off beams.  Default is 'median'.
[1855]3140
[1145]3141            preserve:       you can preserve (default) the continuum or
[1855]3142                            remove it.  The equations used are:
[1846]3143
[1855]3144                                preserve: Output = Toff * (on/off) - Toff
3145
3146                                remove:   Output = Toff * (on/off) - Ton
3147
[1217]3148        """
[1593]3149        mask = mask or ()
[1141]3150        varlist = vars()
3151        on = scantable(self._math._mx_extract(self, 'on'))
[1143]3152        preoff = scantable(self._math._mx_extract(self, 'off'))
3153        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3154        from asapmath  import quotient
[1145]3155        q = quotient(on, off, preserve)
[1143]3156        q._add_history("mx_quotient", varlist)
[1217]3157        return q
[513]3158
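    # A minimal sketch of mx_quotient(), assuming 'scan' was observed in
    # "MX" mode:
    #
    #     q = scan.mx_quotient(weight='median', preserve=True)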
[1862]3159    @asaplog_post_dec
[718]3160    def freq_switch(self, insitu=None):
[1846]3161        """\
[718]3162        Apply frequency switching to the data.
[1846]3163
[718]3164        Parameters:
[1846]3165
[718]3166            insitu:      if False a new scantable is returned.
3167                         Otherwise, the switching is done in-situ
3168                         The default is taken from .asaprc (False)
[1846]3169
[718]3170        """
3171        if insitu is None: insitu = rcParams['insitu']
[876]3172        self._math._setinsitu(insitu)
[718]3173        varlist = vars()
[876]3174        s = scantable(self._math._freqswitch(self))
[1118]3175        s._add_history("freq_switch", varlist)
[1856]3176        if insitu:
3177            self._assign(s)
3178        else:
3179            return s
[718]3180
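    # A minimal sketch of freq_switch(), assuming 'scan' is a
    # frequency-switched observation:
    #
    #     fscan = scan.freq_switch(insitu=False)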
[1862]3181    @asaplog_post_dec
[780]3182    def recalc_azel(self):
[1846]3183        """Recalculate the azimuth and elevation for each position."""
[780]3184        varlist = vars()
[876]3185        self._recalcazel()
[780]3186        self._add_history("recalc_azel", varlist)
3187        return
3188
[1862]3189    @asaplog_post_dec
[513]3190    def __add__(self, other):
3191        varlist = vars()
3192        s = None
3193        if isinstance(other, scantable):
[1573]3194            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3195        elif isinstance(other, float):
[876]3196            s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3197        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3198            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3199                from asapmath import _array2dOp
3200                s = _array2dOp( self.copy(), other, "ADD", False )
3201            else:
3202                s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3203        else:
[718]3204            raise TypeError("Other input is not a scantable or float value")
[513]3205        s._add_history("operator +", varlist)
3206        return s
3207
[1862]3208    @asaplog_post_dec
[513]3209    def __sub__(self, other):
3210        """
3211        implicit on all axes and on Tsys
3212        """
3213        varlist = vars()
3214        s = None
3215        if isinstance(other, scantable):
[1588]3216            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3217        elif isinstance(other, float):
[876]3218            s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3219        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3220            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3221                from asapmath import _array2dOp
3222                s = _array2dOp( self.copy(), other, "SUB", False )
3223            else:
3224                s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3225        else:
[718]3226            raise TypeError("Other input is not a scantable or float value")
[513]3227        s._add_history("operator -", varlist)
3228        return s
[710]3229
[1862]3230    @asaplog_post_dec
[513]3231    def __mul__(self, other):
3232        """
3233        implicit on all axes and on Tsys
3234        """
3235        varlist = vars()
3236        s = None
3237        if isinstance(other, scantable):
[1588]3238            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3239        elif isinstance(other, float):
[876]3240            s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3241        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3242            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3243                from asapmath import _array2dOp
3244                s = _array2dOp( self.copy(), other, "MUL", False )
3245            else:
3246                s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3247        else:
[718]3248            raise TypeError("Other input is not a scantable or float value")
[513]3249        s._add_history("operator *", varlist)
3250        return s
3251
[710]3252
[1862]3253    @asaplog_post_dec
[513]3254    def __div__(self, other):
3255        """
3256        implicit on all axes and on Tsys
3257        """
3258        varlist = vars()
3259        s = None
3260        if isinstance(other, scantable):
[1589]3261            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3262        elif isinstance(other, float):
3263            if other == 0.0:
[718]3264                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3265            s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3266        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3267            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3268                from asapmath import _array2dOp
3269                s = _array2dOp( self.copy(), other, "DIV", False )
3270            else:
3271                s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3272        else:
[718]3273            raise TypeError("Other input is not a scantable or float value")
[513]3274        s._add_history("operator /", varlist)
3275        return s
3276
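    # A sketch of the arithmetic operators above, assuming 's1' and 's2'
    # are conformant scantables:
    #
    #     total  = s1 + s2      # scantable + scantable
    #     offset = s1 + 3.0     # add a constant to all spectra
    #     scaled = s1 * 2.0     # multiply all spectra by a constant
    #     ratio  = s1 / s2      # scantable / scantable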
[1862]3277    @asaplog_post_dec
[530]3278    def get_fit(self, row=0):
[1846]3279        """\
[530]3280        Print or return the stored fits for a row in the scantable
[1846]3281
[530]3282        Parameters:
[1846]3283
[530]3284            row:    the row which the fit has been applied to.
[1846]3285
[530]3286        """
3287        if row > self.nrow():
3288            return
[976]3289        from asap.asapfit import asapfit
[530]3290        fit = asapfit(self._getfit(row))
[1859]3291        asaplog.push( '%s' %(fit) )
3292        return fit.as_dict()
[530]3293
[1483]3294    def flag_nans(self):
[1846]3295        """\
[1483]3296        Utility function to flag NaN values in the scantable.
3297        """
3298        import numpy
3299        basesel = self.get_selection()
3300        for i in range(self.nrow()):
[1589]3301            sel = self.get_row_selector(i)
3302            self.set_selection(basesel+sel)
[1483]3303            nans = numpy.isnan(self._getspectrum(0))
3304            if numpy.any(nans):
3305                bnans = [ bool(v) for v in nans]
3306                self.flag(bnans)
3307        self.set_selection(basesel)
3308
[1588]3309    def get_row_selector(self, rowno):
[1992]3310        #return selector(beams=self.getbeam(rowno),
3311        #                ifs=self.getif(rowno),
3312        #                pols=self.getpol(rowno),
3313        #                scans=self.getscan(rowno),
3314        #                cycles=self.getcycle(rowno))
3315        return selector(rows=[rowno])
[1573]3316
[484]3317    def _add_history(self, funcname, parameters):
[1435]3318        if not rcParams['scantable.history']:
3319            return
[484]3320        # create date
3321        sep = "##"
3322        from datetime import datetime
3323        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3324        hist = dstr+sep
3325        hist += funcname+sep#cdate+sep
3326        if parameters.has_key('self'): del parameters['self']
[1118]3327        for k, v in parameters.iteritems():
[484]3328            if type(v) is dict:
[1118]3329                for k2, v2 in v.iteritems():
[484]3330                    hist += k2
3331                    hist += "="
[1118]3332                    if isinstance(v2, scantable):
[484]3333                        hist += 'scantable'
3334                    elif k2 == 'mask':
[1118]3335                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]3336                            hist += str(self._zip_mask(v2))
3337                        else:
3338                            hist += str(v2)
[484]3339                    else:
[513]3340                        hist += str(v2)
[484]3341            else:
3342                hist += k
3343                hist += "="
[1118]3344                if isinstance(v, scantable):
[484]3345                    hist += 'scantable'
3346                elif k == 'mask':
[1118]3347                    if isinstance(v, list) or isinstance(v, tuple):
[513]3348                        hist += str(self._zip_mask(v))
3349                    else:
3350                        hist += str(v)
[484]3351                else:
3352                    hist += str(v)
3353            hist += sep
3354        hist = hist[:-2] # remove trailing '##'
3355        self._addhistory(hist)
3356
[710]3357
[484]3358    def _zip_mask(self, mask):
3359        mask = list(mask)
3360        i = 0
3361        segments = []
3362        while mask[i:].count(1):
3363            i += mask[i:].index(1)
3364            if mask[i:].count(0):
3365                j = i + mask[i:].index(0)
3366            else:
[710]3367                j = len(mask)
[1118]3368            segments.append([i, j])
[710]3369            i = j
[484]3370        return segments
[714]3371
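    # _zip_mask() collapses a channel mask into half-open [start, end)
    # segments of the selected (True) channels, e.g.:
    #
    #     self._zip_mask([0, 1, 1, 0, 1])  ->  [[1, 3], [4, 5]]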
[626]3372    def _get_ordinate_label(self):
3373        fu = "("+self.get_fluxunit()+")"
3374        import re
3375        lbl = "Intensity"
[1118]3376        if re.match(".K.", fu):
[626]3377            lbl = "Brightness Temperature "+ fu
[1118]3378        elif re.match(".Jy.", fu):
[626]3379            lbl = "Flux density "+ fu
3380        return lbl
[710]3381
[876]3382    def _check_ifs(self):
[1986]3383        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3384        nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3385        nchans = filter(lambda t: t > 0, nchans)
[876]3386        return (sum(nchans)/len(nchans) == nchans[0])
[976]3387
[1862]3388    @asaplog_post_dec
[1916]3389    #def _fill(self, names, unit, average, getpt, antenna):
3390    def _fill(self, names, unit, average, opts={}):
[976]3391        first = True
3392        fullnames = []
3393        for name in names:
3394            name = os.path.expandvars(name)
3395            name = os.path.expanduser(name)
3396            if not os.path.exists(name):
3397                msg = "File '%s' does not exist" % (name)
3398                raise IOError(msg)
3399            fullnames.append(name)
3400        if average:
3401            asaplog.push('Auto averaging integrations')
[1079]3402        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3403        for name in fullnames:
[1073]3404            tbl = Scantable(stype)
[2004]3405            if is_ms( name ):
3406                r = msfiller( tbl )
3407            else:
3408                r = filler( tbl )
3409                rx = rcParams['scantable.reference']
3410                r.setreferenceexpr(rx)
3411            #r = filler(tbl)
3412            #rx = rcParams['scantable.reference']
3413            #r.setreferenceexpr(rx)
[976]3414            msg = "Importing %s..." % (name)
[1118]3415            asaplog.push(msg, False)
[1916]3416            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3417            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3418            r.fill()
[976]3419            if average:
[1118]3420                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3421            if not first:
3422                tbl = self._math._merge([self, tbl])
3423            Scantable.__init__(self, tbl)
[1843]3424            r.close()
[1118]3425            del r, tbl
[976]3426            first = False
[1861]3427            #flush log
3428        asaplog.post()
[976]3429        if unit is not None:
3430            self.set_fluxunit(unit)
[1824]3431        if not is_casapy():
3432            self.set_freqframe(rcParams['scantable.freqframe'])
[976]3433
[2012]3434
[1402]3435    def __getitem__(self, key):
3436        if key < 0:
3437            key += self.nrow()
3438        if key >= self.nrow():
3439            raise IndexError("Row index out of range.")
3440        return self._getspectrum(key)
3441
3442    def __setitem__(self, key, value):
3443        if key < 0:
3444            key += self.nrow()
3445        if key >= self.nrow():
3446            raise IndexError("Row index out of range.")
3447        if not hasattr(value, "__len__") or \
3448                len(value) > self.nchan(self.getif(key)):
3449            raise ValueError("Spectrum length doesn't match.")
3450        return self._setspectrum(value, key)
3451
3452    def __len__(self):
3453        return self.nrow()
3454
3455    def __iter__(self):
3456        for i in range(len(self)):
3457            yield self[i]
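    # With the sequence protocol above, a scantable can be indexed and
    # iterated over row by row, e.g. (assuming 'scan' is a scantable):
    #
    #     first_spectrum = scan[0]
    #     for spectrum in scan:
    #         print len(spectrum)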