source: trunk/python/scantable.py @ 2322

Last change on this file since 2322 was 2322, checked in by Malte Marquarding, 13 years ago

Fixed another instance of inserting a new method argument. THIS NEEDS TO GO TO THE END!

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 135.7 KB
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[2315]4import tempfile
[1948]5import numpy
[1691]6try:
7    from functools import wraps as wraps_dec
8except ImportError:
9    from asap.compatibility import wraps as wraps_dec
10
[1824]11from asap.env import is_casapy
[876]12from asap._asap import Scantable
[2004]13from asap._asap import filler, msfiller
[1824]14from asap.parameters import rcParams
[1862]15from asap.logging import asaplog, asaplog_post_dec
[1824]16from asap.selector import selector
17from asap.linecatalog import linecatalog
[1600]18from asap.coordinate import coordinate
[1859]19from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]20from asap.asapfitter import fitter
[102]21
[1689]22
23def preserve_selection(func):
[1691]24    @wraps_dec(func)
[1689]25    def wrap(obj, *args, **kw):
26        basesel = obj.get_selection()
[1857]27        try:
28            val = func(obj, *args, **kw)
29        finally:
30            obj.set_selection(basesel)
[1689]31        return val
32    return wrap
33
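# A minimal usage sketch of the decorator above (illustrative only; 'scan'
# and 'my_stats' are assumed names, not part of this module):
#
#     @preserve_selection
#     def my_stats(scan):
#         scan.set_selection(selector(scans=[0]))
#         return scan.stats('mean')
#
# After my_stats(scan) returns, the scantable's original selection is
# restored even if the wrapped call raises.
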
[1846]34def is_scantable(filename):
35    """Is the given file a scantable?
[1689]36
[1846]37    Parameters:
38
39        filename: the name of the file/directory to test
40
41    """
[1883]42    if ( os.path.isdir(filename)
43         and os.path.exists(filename+'/table.info')
44         and os.path.exists(filename+'/table.dat') ):
45        f=open(filename+'/table.info')
46        l=f.readline()
47        f.close()
48        #if ( l.find('Scantable') != -1 ):
49        if ( l.find('Measurement Set') == -1 ):
50            return True
51        else:
52            return False
53    else:
54        return False
55##     return (os.path.isdir(filename)
56##             and not os.path.exists(filename+'/table.f1')
57##             and os.path.exists(filename+'/table.info'))
[1697]58
[1883]59def is_ms(filename):
60    """Is the given file a MeasurementSet?
[1697]61
[1883]62    Parameters:
63
64        filename: the name of the file/directory to test
65
66    """
67    if ( os.path.isdir(filename)
68         and os.path.exists(filename+'/table.info')
69         and os.path.exists(filename+'/table.dat') ):
70        f=open(filename+'/table.info')
71        l=f.readline()
72        f.close()
73        if ( l.find('Measurement Set') != -1 ):
74            return True
75        else:
76            return False
77    else:
78        return False
[2186]79
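# Illustrative sketch (hypothetical path, not from this module): these
# helpers only inspect 'table.info'/'table.dat' on disk, so they can be used
# to decide how a given path should be filled before building a scantable:
#
#     if is_scantable(path):
#         s = scantable(path)
#     elif is_ms(path):
#         s = scantable(path, getpt=False)
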
80def normalise_edge_param(edge):
81    """\
82    Convert a given edge value to a one-dimensional array that can be
83    given to baseline-fitting/subtraction functions.
84    The length of the output is always even, because it holds values for
85    both edges of the spectra for each IF. When the length is 2, the
86    values are applied to all IFs. If the length is larger than 2, it
87    will be 2*ifnos().
88    Accepted formats of edge include:
89            * an integer - will be used for both sides of spectra of all IFs.
90                  e.g. 10 is converted to [10,10]
[2277]91            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
[2186]92            * a list/tuple containing an integer - same as the above case.
93                  e.g. [10] is converted to [10,10]
94            * a list/tuple containing two integers - will be used for all IFs.
95                  e.g. [5,10] is output as it is. no need to convert.
96            * a list/tuple of lists/tuples containing TWO integers -
97                  each element of edge will be used for each IF.
[2277]98                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
99                 
100                  If an element contains the same integer values, the input 'edge'
101                  parameter can be given in a simpler shape in the following cases:
[2186]102                      ** when len(edge)!=2
[2277]103                          any elements containing the same values can be replaced
104                          by single integers.
105                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
106                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
[2186]107                      ** when len(edge)=2
108                          care is needed for this case: ONLY ONE of the
109                          elements can be a single integer,
110                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
[2277]111                               or [[5,5],10], but can NOT be simplified to [5,10].
[2186]112                               when [5,10] is given, it is interpreted as
[2277]113                               [[5,10],[5,10],[5,10],...] instead, as shown before.
[2186]114    """
115    from asap import _is_sequence_or_number as _is_valid
116    if isinstance(edge, list) or isinstance(edge, tuple):
117        for edgepar in edge:
118            if not _is_valid(edgepar, int):
119                raise ValueError, "Each element of the 'edge' tuple has \
120                                   to be a pair of integers or an integer."
121            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
122                if len(edgepar) != 2:
123                    raise ValueError, "Each element of the 'edge' tuple has \
124                                       to be a pair of integers or an integer."
125    else:
126        if not _is_valid(edge, int):
127            raise ValueError, "Parameter 'edge' has to be an integer or a \
128                               pair of integers specified as a tuple. \
129                               Nested tuples are allowed \
130                               to make individual selection for different IFs."
131       
132
133    if isinstance(edge, int):
134        edge = [ edge, edge ]                 # e.g. 3   => [3,3]
135    elif isinstance(edge, list) or isinstance(edge, tuple):
136        if len(edge) == 0:
137            edge = [0, 0]                     # e.g. []  => [0,0]
138        elif len(edge) == 1:
139            if isinstance(edge[0], int):
140                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]
141
142    commonedge = True
143    if len(edge) > 2: commonedge = False
144    else:
145        for edgepar in edge:
146            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
147                commonedge = False
148                break
149
150    if commonedge:
151        if len(edge) > 1:
152            norm_edge = edge
153        else:
154            norm_edge = edge + edge
155    else:
156        norm_edge = []
157        for edgepar in edge:
158            if isinstance(edgepar, int):
159                norm_edge += [edgepar, edgepar]
160            else:
161                norm_edge += edgepar
162
163    return norm_edge
164
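# Worked examples of the conversion rules described in the docstring above
# (illustrative only, derived from the code as written here):
#
#     normalise_edge_param(10)                   # -> [10, 10]
#     normalise_edge_param([])                   # -> [0, 0]
#     normalise_edge_param([5, 10])              # -> [5, 10]
#     normalise_edge_param([1, 2, 3])            # -> [1, 1, 2, 2, 3, 3]
#     normalise_edge_param([[5, 10], [15, 20]])  # -> [5, 10, 15, 20]
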
165def raise_fitting_failure_exception(e):
166    msg = "The fit failed, possibly because it didn't converge."
167    if rcParams["verbose"]:
168        asaplog.push(str(e))
169        asaplog.push(str(msg))
170    else:
171        raise RuntimeError(str(e)+'\n'+msg)
172
[2189]173def pack_progress_params(showprogress, minnrow):
174    return str(showprogress).lower() + ',' + str(minnrow)
175
[876]176class scantable(Scantable):
[1846]177    """\
178        The ASAP container for scans (single-dish data).
[102]179    """
[1819]180
[1862]181    @asaplog_post_dec
[1916]182    #def __init__(self, filename, average=None, unit=None, getpt=None,
183    #             antenna=None, parallactify=None):
[2315]184    def __init__(self, filename, average=None, unit=None, parallactify=None,
185                 **args):
[1846]186        """\
[102]187        Create a scantable from a saved one or make a reference
[1846]188
[102]189        Parameters:
[1846]190
191            filename:     the name of an asap table on disk
192                          or
193                          the name of a rpfits/sdfits/ms file
194                          (integrations within scans are auto averaged
195                          and the whole file is read) or
196                          [advanced] a reference to an existing scantable
197
198            average:      average all integrations withinb a scan on read.
199                          The default (True) is taken from .asaprc.
200
[484]201            unit:         brightness unit; must be consistent with K or Jy.
[1846]202                          Overrides the default selected by the filler
203                          (input rpfits/sdfits/ms) or replaces the value
204                          in existing scantables
205
206            getpt:        for MeasurementSet input data only:
207                          If True, all pointing data are filled.
208                          The default is False, which makes loading the
209                          MS data faster in some cases.
210
[1920]211            antenna:      for MeasurementSet input data only:
[2277]212                          Antenna selection. Integer (id) or string (name or id).
[1846]213
214            parallactify: Indicate that the data had been parallatified. Default
215                          is taken from rc file.
216
[710]217        """
[976]218        if average is None:
[710]219            average = rcParams['scantable.autoaverage']
[1916]220        #if getpt is None:
221        #    getpt = True
222        #if antenna is not None:
223        #    asaplog.push("Antenna selection currently unsupported."
224        #                 "Using ''")
225        #    asaplog.post('WARN')
226        #if antenna is None:
227        #    antenna = ''
228        #elif type(antenna) == int:
229        #    antenna = '%s' % antenna
230        #elif type(antenna) == list:
231        #    tmpstr = ''
232        #    for i in range( len(antenna) ):
233        #        if type(antenna[i]) == int:
234        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
235        #        elif type(antenna[i]) == str:
236        #            tmpstr=tmpstr+antenna[i]+','
237        #        else:
238        #            raise TypeError('Bad antenna selection.')
239        #    antenna = tmpstr.rstrip(',')
[1593]240        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]241        varlist = vars()
[876]242        from asap._asap import stmath
[1819]243        self._math = stmath( rcParams['insitu'] )
[876]244        if isinstance(filename, Scantable):
245            Scantable.__init__(self, filename)
[181]246        else:
[1697]247            if isinstance(filename, str):
[976]248                filename = os.path.expandvars(filename)
249                filename = os.path.expanduser(filename)
250                if not os.path.exists(filename):
251                    s = "File '%s' not found." % (filename)
252                    raise IOError(s)
[1697]253                if is_scantable(filename):
254                    ondisk = rcParams['scantable.storage'] == 'disk'
255                    Scantable.__init__(self, filename, ondisk)
256                    if unit is not None:
257                        self.set_fluxunit(unit)
[2008]258                    if average:
259                        self._assign( self.average_time( scanav=True ) )
[1819]260                    # do not reset to the default freqframe
261                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]262                #elif os.path.isdir(filename) \
263                #         and not os.path.exists(filename+'/table.f1'):
264                elif is_ms(filename):
[1916]265                    # Measurement Set
266                    opts={'ms': {}}
267                    mskeys=['getpt','antenna']
268                    for key in mskeys:
269                        if key in args.keys():
270                            opts['ms'][key] = args[key]
271                    #self._fill([filename], unit, average, getpt, antenna)
272                    self._fill([filename], unit, average, opts)
[1893]273                elif os.path.isfile(filename):
[1916]274                    #self._fill([filename], unit, average, getpt, antenna)
275                    self._fill([filename], unit, average)
[1883]276                else:
[1819]277                    msg = "The given file '%s' is not a valid " \
278                          "asap table." % (filename)
[1859]279                    raise IOError(msg)
[1118]280            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]281                  and isinstance(filename[-1], str):
[1916]282                #self._fill(filename, unit, average, getpt, antenna)
283                self._fill(filename, unit, average)
[1586]284        self.parallactify(parallactify)
[1259]285        self._add_history("scantable", varlist)
[102]286
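    # Construction sketch (file names below are assumptions, purely
    # illustrative):
    #
    #     s = scantable('2010-03-10_1234.rpf', average=True)       # fill from RPFITS
    #     m = scantable('uid___X1.ms', average=False, getpt=False) # fill from an MS
    #     ref = scantable(s)                                        # reference, not a copy
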
[1862]287    @asaplog_post_dec
[876]288    def save(self, name=None, format=None, overwrite=False):
[1846]289        """\
[1280]290        Store the scantable on disk. This can be an asap (aips++) Table,
291        SDFITS or MS2 format.
[1846]292
[116]293        Parameters:
[1846]294
[1093]295            name:        the name of the outputfile. For format "ASCII"
296                         this is the root file name (data in 'name'.txt
[497]297                         and header in 'name'_header.txt)
[1855]298
[116]299            format:      an optional file format. Default is ASAP.
[1855]300                         Allowed are:
301
302                            * 'ASAP' (save as ASAP [aips++] Table),
303                            * 'SDFITS' (save as SDFITS file)
304                            * 'ASCII' (saves as ascii text file)
305                            * 'MS2' (saves as a casacore MeasurementSet V2)
[2315]306                            * 'FITS' (save as image FITS - not readable by
307                                      CLASS)
[1855]308                            * 'CLASS' (save as FITS readable by CLASS)
309
[411]310            overwrite:   If the file should be overwritten if it exists.
[256]311                         The default False is to return with warning
[411]312                         without writing the output. USE WITH CARE.
[1855]313
[1846]314        Example::
315
[116]316            scan.save('myscan.asap')
[1118]317            scan.save('myscan.sdfits', 'SDFITS')
[1846]318
[116]319        """
[411]320        from os import path
[1593]321        format = format or rcParams['scantable.save']
[256]322        suffix = '.'+format.lower()
[1118]323        if name is None or name == "":
[256]324            name = 'scantable'+suffix
[718]325            msg = "No filename given. Using default name %s..." % name
326            asaplog.push(msg)
[411]327        name = path.expandvars(name)
[256]328        if path.isfile(name) or path.isdir(name):
329            if not overwrite:
[718]330                msg = "File %s exists." % name
[1859]331                raise IOError(msg)
[451]332        format2 = format.upper()
333        if format2 == 'ASAP':
[116]334            self._save(name)
[2029]335        elif format2 == 'MS2':
336            msopt = {'ms': {'overwrite': overwrite } }
337            from asap._asap import mswriter
338            writer = mswriter( self )
339            writer.write( name, msopt )
[116]340        else:
[989]341            from asap._asap import stwriter as stw
[1118]342            writer = stw(format2)
343            writer.write(self, name)
[116]344        return
345
[102]346    def copy(self):
[1846]347        """Return a copy of this scantable.
348
349        *Note*:
350
[1348]351            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]352
353        Example::
354
[102]355            copiedscan = scan.copy()
[1846]356
[102]357        """
[876]358        sd = scantable(Scantable._copy(self))
[113]359        return sd
360
[1093]361    def drop_scan(self, scanid=None):
[1846]362        """\
[1093]363        Return a new scantable where the specified scan number(s) has(have)
364        been dropped.
[1846]365
[1093]366        Parameters:
[1846]367
[1093]368            scanid:    a (list of) scan number(s)
[1846]369
[1093]370        """
371        from asap import _is_sequence_or_number as _is_valid
372        from asap import _to_list
373        from asap import unique
374        if not _is_valid(scanid):
[2315]375            raise RuntimeError( 'Please specify a scanno to drop from the'
376                                ' scantable' )
[1859]377        scanid = _to_list(scanid)
378        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
379        for sid in scanid: allscans.remove(sid)
380        if len(allscans) == 0:
381            raise ValueError("Can't remove all scans")
382        sel = selector(scans=allscans)
383        return self._select_copy(sel)
[1093]384
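    # Illustrative only ('scan' is an assumed scantable with several scans):
    #
    #     cleaned = scan.drop_scan(1)        # drop scan number 1
    #     cleaned = scan.drop_scan([2, 4])   # drop scan numbers 2 and 4
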
[1594]385    def _select_copy(self, selection):
386        orig = self.get_selection()
387        self.set_selection(orig+selection)
388        cp = self.copy()
389        self.set_selection(orig)
390        return cp
391
[102]392    def get_scan(self, scanid=None):
[1855]393        """\
[102]394        Return a specific scan (by scanno) or collection of scans (by
395        source name) in a new scantable.
[1846]396
397        *Note*:
398
[1348]399            See scantable.drop_scan() for the inverse operation.
[1846]400
[102]401        Parameters:
[1846]402
[513]403            scanid:    a (list of) scanno or a source name, unix-style
404                       patterns are accepted for source name matching, e.g.
405                       '*_R' gets all 'ref' scans
[1846]406
407        Example::
408
[513]409            # get all scans containing the source '323p459'
410            newscan = scan.get_scan('323p459')
411            # get all 'off' scans
412            refscans = scan.get_scan('*_R')
413            # get a susbset of scans by scanno (as listed in scan.summary())
[1118]414            newscan = scan.get_scan([0, 2, 7, 10])
[1846]415
[102]416        """
417        if scanid is None:
[1859]418            raise RuntimeError( 'Please specify a scan no or name to '
419                                'retrieve from the scantable' )
[102]420        try:
[946]421            bsel = self.get_selection()
422            sel = selector()
[102]423            if type(scanid) is str:
[946]424                sel.set_name(scanid)
[1594]425                return self._select_copy(sel)
[102]426            elif type(scanid) is int:
[946]427                sel.set_scans([scanid])
[1594]428                return self._select_copy(sel)
[381]429            elif type(scanid) is list:
[946]430                sel.set_scans(scanid)
[1594]431                return self._select_copy(sel)
[381]432            else:
[718]433                msg = "Illegal scanid type, use 'int' or 'list' of ints."
[1859]434                raise TypeError(msg)
[102]435        except RuntimeError:
[1859]436            raise
[102]437
438    def __str__(self):
[2315]439        tempFile = tempfile.NamedTemporaryFile()
440        Scantable._summary(self, tempFile.name)
441        tempFile.seek(0)
442        asaplog.clear()
443        return tempFile.file.read()
[102]444
[2315]445    @asaplog_post_dec
[976]446    def summary(self, filename=None):
[1846]447        """\
[102]448        Print a summary of the contents of this scantable.
[1846]449
[102]450        Parameters:
[1846]451
[1931]452            filename:    the name of a file to write the output to
[102]453                         Default - no file output
[1846]454
[102]455        """
[2286]456#         info = Scantable._summary(self)
[102]457        if filename is not None:
[256]458            if filename == "":
459                filename = 'scantable_summary.txt'
[415]460            from os.path import expandvars, isdir
[411]461            filename = expandvars(filename)
[2286]462#             if not isdir(filename):
463#                 data = open(filename, 'w')
464#                 data.write(info)
465#                 data.close()
466#             else:
467            if isdir(filename):
[718]468                msg = "Illegal file name '%s'." % (filename)
[1859]469                raise IOError(msg)
[2286]470        else:
471            filename = ""
472        Scantable._summary(self, filename)
[2290]473#         info = Scantable._summary(self, filename)
[2286]474#         return page(info)
[710]475
[1512]476    def get_spectrum(self, rowno):
[1471]477        """Return the spectrum for the current row in the scantable as a list.
[1846]478
[1471]479        Parameters:
[1846]480
[1573]481             rowno:   the row number to retrieve the spectrum from
[1846]482
[1471]483        """
484        return self._getspectrum(rowno)
[946]485
[1471]486    def get_mask(self, rowno):
487        """Return the mask for the current row in the scantable as a list.
[1846]488
[1471]489        Parameters:
[1846]490
[1573]491             rowno:   the row number to retrieve the mask from
[1846]492
[1471]493        """
494        return self._getmask(rowno)
495
496    def set_spectrum(self, spec, rowno):
[1938]497        """Set the spectrum for the current row in the scantable.
[1846]498
[1471]499        Parameters:
[1846]500
[1855]501             spec:   the new spectrum
[1846]502
[1855]503             rowno:  the row number to set the spectrum for
504
[1471]505        """
506        assert(len(spec) == self.nchan())
507        return self._setspectrum(spec, rowno)
508
[1600]509    def get_coordinate(self, rowno):
510        """Return the (spectral) coordinate for a a given 'rowno'.
[1846]511
512        *Note*:
513
[1600]514            * This coordinate is only valid until a scantable method modifies
515              the frequency axis.
516            * This coordinate contains the original frequency set-up,
517              NOT the new frame. The conversions, however, are done using the
518              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
519              use scantable.freq_align first. Without it there is no closure,
[1846]520              i.e.::
[1600]521
[1846]522                  c = myscan.get_coordinate(0)
523                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
524
[1600]525        Parameters:
[1846]526
[1600]527             rowno:    the row number for the spectral coordinate
528
529        """
530        return coordinate(Scantable.get_coordinate(self, rowno))
531
[946]532    def get_selection(self):
[1846]533        """\
[1005]534        Get the selection object currently set on this scantable.
[1846]535
536        Example::
537
[1005]538            sel = scan.get_selection()
539            sel.set_ifs(0)              # select IF 0
540            scan.set_selection(sel)     # apply modified selection
[1846]541
[946]542        """
543        return selector(self._getselection())
544
[1576]545    def set_selection(self, selection=None, **kw):
[1846]546        """\
[1005]547        Select a subset of the data. All following operations on this scantable
548        are only applied to thi selection.
[1846]549
[1005]550        Parameters:
[1697]551
[1846]552            selection:    a selector object (default unset the selection), or
553                          any combination of "pols", "ifs", "beams", "scans",
554                          "cycles", "name", "query"
[1697]555
[1846]556        Examples::
[1697]557
[1005]558            sel = selector()         # create a selection object
[1118]559            sel.set_scans([0, 3])    # select SCANNO 0 and 3
[1005]560            scan.set_selection(sel)  # set the selection
561            scan.summary()           # will only print summary of scanno 0 an 3
562            scan.set_selection()     # unset the selection
[1697]563            # or the equivalent
564            scan.set_selection(scans=[0,3])
565            scan.summary()           # will only print summary of scanno 0 and 3
566            scan.set_selection()     # unset the selection
[1846]567
[946]568        """
[1576]569        if selection is None:
570            # reset
571            if len(kw) == 0:
572                selection = selector()
573            else:
574                # try keywords
575                for k in kw:
576                    if k not in selector.fields:
[2320]577                        raise KeyError("Invalid selection key '%s', "
578                                       "valid keys are %s" % (k,
579                                                              selector.fields))
[1576]580                selection = selector(**kw)
[946]581        self._setselection(selection)
582
[1819]583    def get_row(self, row=0, insitu=None):
[1846]584        """\
[1819]585        Select a row in the scantable.
586        Return a scantable with single row.
[1846]587
[1819]588        Parameters:
[1846]589
590            row:    row no of integration, default is 0.
591            insitu: if False a new scantable is returned. Otherwise, the
592                    selection is done in-situ. The default is taken from .asaprc
593                    (False)
594
[1819]595        """
596        if insitu is None: insitu = rcParams['insitu']
597        if not insitu:
598            workscan = self.copy()
599        else:
600            workscan = self
601        # Select a row
602        sel=selector()
[1992]603        sel.set_rows([row])
604        #sel.set_scans([workscan.getscan(row)])
605        #sel.set_cycles([workscan.getcycle(row)])
606        #sel.set_beams([workscan.getbeam(row)])
607        #sel.set_ifs([workscan.getif(row)])
608        #sel.set_polarisations([workscan.getpol(row)])
609        #sel.set_name(workscan._getsourcename(row))
[1819]610        workscan.set_selection(sel)
611        if not workscan.nrow() == 1:
612            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
613            raise RuntimeError(msg)
614        del sel
615        if insitu:
616            self._assign(workscan)
617        else:
618            return workscan
619
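    # Illustrative only ('scan' is an assumed scantable): pull out a single
    # integration without modifying the original:
    #
    #     one = scan.get_row(5, insitu=False)
    #     one.nrow()    # -> 1
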
[1862]620    @asaplog_post_dec
[1907]621    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
[1846]622        """\
[135]623        Determine the specified statistic of the current beam/if/pol
[102]624        Takes a 'mask' as an optional parameter to specify which
625        channels should be excluded.
[1846]626
[102]627        Parameters:
[1846]628
[1819]629            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
630                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
[1855]631
[135]632            mask:    an optional mask specifying where the statistic
[102]633                     should be determined.
[1855]634
[1819]635            form:    format string to print statistic values
[1846]636
[1907]637            row:     row number of spectrum to process.
638                     (default is None: for all rows)
[1846]639
[1907]640        Example:
[113]641            scan.set_unit('channel')
[1118]642            msk = scan.create_mask([100, 200], [500, 600])
[135]643            scan.stats(stat='mean', mask=msk)
[1846]644
[102]645        """
[1593]646        mask = mask or []
[876]647        if not self._check_ifs():
[1118]648            raise ValueError("Cannot apply mask as the IFs have different "
649                             "number of channels. Please use setselection() "
650                             "to select individual IFs")
[1819]651        rtnabc = False
652        if stat.lower().endswith('_abc'): rtnabc = True
653        getchan = False
654        if stat.lower().startswith('min') or stat.lower().startswith('max'):
655            chan = self._math._minmaxchan(self, mask, stat)
656            getchan = True
657            statvals = []
[1907]658        if not rtnabc:
659            if row == None:
660                statvals = self._math._stats(self, mask, stat)
661            else:
662                statvals = self._math._statsrow(self, mask, stat, int(row))
[256]663
[1819]664        #def cb(i):
665        #    return statvals[i]
[256]666
[1819]667        #return self._row_callback(cb, stat)
[102]668
[1819]669        label=stat
670        #callback=cb
671        out = ""
672        #outvec = []
673        sep = '-'*50
[1907]674
675        if row == None:
676            rows = xrange(self.nrow())
677        elif isinstance(row, int):
678            rows = [ row ]
679
680        for i in rows:
[1819]681            refstr = ''
682            statunit= ''
683            if getchan:
684                qx, qy = self.chan2data(rowno=i, chan=chan[i])
685                if rtnabc:
686                    statvals.append(qx['value'])
687                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
688                    statunit= '['+qx['unit']+']'
689                else:
690                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
691
692            tm = self._gettime(i)
693            src = self._getsourcename(i)
694            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
695            out += 'Time[%s]:\n' % (tm)
[1907]696            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
697            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
698            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
[1819]699            #outvec.append(callback(i))
[1907]700            if len(rows) > 1:
701                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
702                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
703            else:
704                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
705                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
[1819]706            out +=  sep+"\n"
707
[1859]708        import os
709        if os.environ.has_key( 'USER' ):
710            usr = os.environ['USER']
711        else:
712            import commands
713            usr = commands.getoutput( 'whoami' )
714        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
715        f = open(tmpfile,'w')
716        print >> f, sep
717        print >> f, ' %s %s' % (label, statunit)
718        print >> f, sep
719        print >> f, out
720        f.close()
721        f = open(tmpfile,'r')
722        x = f.readlines()
723        f.close()
724        asaplog.push(''.join(x), False)
725
[1819]726        return statvals
727
728    def chan2data(self, rowno=0, chan=0):
[1846]729        """\
[1819]730        Returns channel/frequency/velocity and spectral value
731        at an arbitrary row and channel in the scantable.
[1846]732
[1819]733        Parameters:
[1846]734
[1819]735            rowno:   a row number in the scantable. Default is the
736                     first row, i.e. rowno=0
[1855]737
[1819]738            chan:    a channel in the scantable. Default is the first
739                     channel, i.e. chan=0
[1846]740
[1819]741        """
742        if isinstance(rowno, int) and isinstance(chan, int):
743            qx = {'unit': self.get_unit(),
744                  'value': self._getabcissa(rowno)[chan]}
745            qy = {'unit': self.get_fluxunit(),
746                  'value': self._getspectrum(rowno)[chan]}
747            return qx, qy
748
[1118]749    def stddev(self, mask=None):
[1846]750        """\
[135]751        Determine the standard deviation of the current beam/if/pol
752        Takes a 'mask' as an optional parameter to specify which
753        channels should be excluded.
[1846]754
[135]755        Parameters:
[1846]756
[135]757            mask:    an optional mask specifying where the standard
758                     deviation should be determined.
759
[1846]760        Example::
761
[135]762            scan.set_unit('channel')
[1118]763            msk = scan.create_mask([100, 200], [500, 600])
[135]764            scan.stddev(mask=msk)
[1846]765
[135]766        """
[1118]767        return self.stats(stat='stddev', mask=mask)
[135]768
[1003]769
[1259]770    def get_column_names(self):
[1846]771        """\
[1003]772        Return a list of column names, which can be used for selection.
773        """
[1259]774        return list(Scantable.get_column_names(self))
[1003]775
[1730]776    def get_tsys(self, row=-1):
[1846]777        """\
[113]778        Return the System temperatures.
[1846]779
780        Parameters:
781
782            row:    the rowno to get the information for. (default all rows)
783
[113]784        Returns:
[1846]785
[876]786            a list of Tsys values for the current selection
[1846]787
[113]788        """
[1730]789        if row > -1:
790            return self._get_column(self._gettsys, row)
[876]791        return self._row_callback(self._gettsys, "Tsys")
[256]792
[1730]793
794    def get_weather(self, row=-1):
[1846]795        """\
796        Return the weather information.
797
798        Parameters:
799
800            row:    the rowno to get the information for. (default all rows)
801
802        Returns:
803
804            a dict or list of dicts of values for the current selection
805
806        """
807
[1730]808        values = self._get_column(self._get_weather, row)
809        if row > -1:
810            return {'temperature': values[0],
811                    'pressure': values[1], 'humidity' : values[2],
812                    'windspeed' : values[3], 'windaz' : values[4]
813                    }
814        else:
815            out = []
816            for r in values:
817
818                out.append({'temperature': r[0],
819                            'pressure': r[1], 'humidity' : r[2],
820                            'windspeed' : r[3], 'windaz' : r[4]
821                    })
822            return out
823
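    # Sketch of the two return forms described above ('scan' is assumed):
    #
    #     w = scan.get_weather(row=0)   # dict, e.g. w['temperature'], w['windspeed']
    #     ws = scan.get_weather()       # list of such dicts, one per row
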
[876]824    def _row_callback(self, callback, label):
825        out = ""
[1118]826        outvec = []
[1590]827        sep = '-'*50
[876]828        for i in range(self.nrow()):
829            tm = self._gettime(i)
830            src = self._getsourcename(i)
[1590]831            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]832            out += 'Time[%s]:\n' % (tm)
[1590]833            if self.nbeam(-1) > 1:
834                out +=  ' Beam[%d] ' % (self.getbeam(i))
835            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
836            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]837            outvec.append(callback(i))
838            out += '= %3.3f\n' % (outvec[i])
[1590]839            out +=  sep+'\n'
[1859]840
841        asaplog.push(sep)
842        asaplog.push(" %s" % (label))
843        asaplog.push(sep)
844        asaplog.push(out)
[1861]845        asaplog.post()
[1175]846        return outvec
[256]847
[1947]848    def _get_column(self, callback, row=-1, *args):
[1070]849        """
850        """
851        if row == -1:
[1947]852            return [callback(i, *args) for i in range(self.nrow())]
[1070]853        else:
[1819]854            if  0 <= row < self.nrow():
[1947]855                return callback(row, *args)
[256]856
[1070]857
[1948]858    def get_time(self, row=-1, asdatetime=False, prec=-1):
[1846]859        """\
[113]860        Get a list of time stamps for the observations.
[1938]861        Return a datetime object or a string (default) for each
862        integration time stamp in the scantable.
[1846]863
[113]864        Parameters:
[1846]865
[1348]866            row:          row no of integration. Default -1 return all rows
[1855]867
[1348]868            asdatetime:   return values as datetime objects rather than strings
[1846]869
[2277]870            prec:         number of digits shown. Default -1 for automatic calculation.
[1948]871                          Note this number equals the number of digits of MVTime,
872                          i.e., 0<prec<3: dates with hh:: only,
873                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
[1947]874                          and >6: with hh:mm:ss.tt... (prec-6 t's added)
875
[113]876        """
[1175]877        from datetime import datetime
[1948]878        if prec < 0:
879            # automagically set necessary precision +1
[1950]880            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
[1948]881            prec = max(6, int(prec))
882        else:
883            prec = max(0, prec)
884        if asdatetime:
885            #precision can be 1 millisecond at max
886            prec = min(12, prec)
887
[1947]888        times = self._get_column(self._gettime, row, prec)
[1348]889        if not asdatetime:
[1392]890            return times
[1947]891        format = "%Y/%m/%d/%H:%M:%S.%f"
892        if prec < 7:
893            nsub = 1 + (((6-prec)/2) % 3)
894            substr = [".%f","%S","%M"]
895            for i in range(nsub):
896                format = format.replace(substr[i],"")
[1175]897        if isinstance(times, list):
[1947]898            return [datetime.strptime(i, format) for i in times]
[1175]899        else:
[1947]900            return datetime.strptime(times, format)
[102]901
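    # Illustrative sketch of the precision handling above ('scan' is an
    # assumed scantable):
    #
    #     scan.get_time(0)                   # string 'yyyy/mm/dd/hh:mm:ss.ss...'
    #     scan.get_time(0, asdatetime=True)  # datetime.datetime object
    #     scan.get_time(0, prec=4)           # truncated to minutes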
[1348]902
903    def get_inttime(self, row=-1):
[1846]904        """\
[1348]905        Get a list of integration times for the observations.
906        Return a time in seconds for each integration in the scantable.
[1846]907
[1348]908        Parameters:
[1846]909
[1348]910            row:    row no of integration. Default -1 return all rows.
[1846]911
[1348]912        """
[1573]913        return self._get_column(self._getinttime, row)
[1348]914
[1573]915
[714]916    def get_sourcename(self, row=-1):
[1846]917        """\
[794]918        Get a list of source names for the observations.
[714]919        Return a string for each integration in the scantable.
920        Parameters:
[1846]921
[1348]922            row:    row no of integration. Default -1 return all rows.
[1846]923
[714]924        """
[1070]925        return self._get_column(self._getsourcename, row)
[714]926
[794]927    def get_elevation(self, row=-1):
[1846]928        """\
[794]929        Get a list of elevations for the observations.
930        Return a float for each integration in the scantable.
[1846]931
[794]932        Parameters:
[1846]933
[1348]934            row:    row no of integration. Default -1 return all rows.
[1846]935
[794]936        """
[1070]937        return self._get_column(self._getelevation, row)
[794]938
939    def get_azimuth(self, row=-1):
[1846]940        """\
[794]941        Get a list of azimuths for the observations.
942        Return a float for each integration in the scantable.
[1846]943
[794]944        Parameters:
[1348]945            row:    row no of integration. Default -1 return all rows.
[1846]946
[794]947        """
[1070]948        return self._get_column(self._getazimuth, row)
[794]949
950    def get_parangle(self, row=-1):
[1846]951        """\
[794]952        Get a list of parallactic angles for the observations.
953        Return a float for each integration in the scantable.
[1846]954
[794]955        Parameters:
[1846]956
[1348]957            row:    row no of integration. Default -1 return all rows.
[1846]958
[794]959        """
[1070]960        return self._get_column(self._getparangle, row)
[794]961
[1070]962    def get_direction(self, row=-1):
963        """
964        Get a list of Positions on the sky (direction) for the observations.
[1594]965        Return a string for each integration in the scantable.
[1855]966
[1070]967        Parameters:
[1855]968
[1070]969            row:    row no of integration. Default -1 return all rows
[1855]970
[1070]971        """
972        return self._get_column(self._getdirection, row)
973
[1391]974    def get_directionval(self, row=-1):
[1846]975        """\
[1391]976        Get a list of Positions on the sky (direction) for the observations.
977        Return a float for each integration in the scantable.
[1846]978
[1391]979        Parameters:
[1846]980
[1391]981            row:    row no of integration. Default -1 return all rows
[1846]982
[1391]983        """
984        return self._get_column(self._getdirectionvec, row)
985
[1862]986    @asaplog_post_dec
[102]987    def set_unit(self, unit='channel'):
[1846]988        """\
[102]989        Set the unit for all following operations on this scantable
[1846]990
[102]991        Parameters:
[1846]992
993            unit:    optional unit, default is 'channel'. Use one of '*Hz',
994                     'km/s', 'channel' or equivalent ''
995
[102]996        """
[484]997        varlist = vars()
[1118]998        if unit in ['', 'pixel', 'channel']:
[113]999            unit = ''
1000        inf = list(self._getcoordinfo())
1001        inf[0] = unit
1002        self._setcoordinfo(inf)
[1118]1003        self._add_history("set_unit", varlist)
[113]1004
[1862]1005    @asaplog_post_dec
[484]1006    def set_instrument(self, instr):
[1846]1007        """\
[1348]1008        Set the instrument for subsequent processing.
[1846]1009
[358]1010        Parameters:
[1846]1011
[710]1012            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]1013                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]1014
[358]1015        """
1016        self._setInstrument(instr)
[1118]1017        self._add_history("set_instrument", vars())
[358]1018
[1862]1019    @asaplog_post_dec
[1190]1020    def set_feedtype(self, feedtype):
[1846]1021        """\
[1190]1022        Overwrite the feed type, which might not be set correctly.
[1846]1023
[1190]1024        Parameters:
[1846]1025
[1190]1026            feedtype:     'linear' or 'circular'
[1846]1027
[1190]1028        """
1029        self._setfeedtype(feedtype)
1030        self._add_history("set_feedtype", vars())
1031
[1862]1032    @asaplog_post_dec
[276]1033    def set_doppler(self, doppler='RADIO'):
[1846]1034        """\
[276]1035        Set the doppler for all following operations on this scantable.
[1846]1036
[276]1037        Parameters:
[1846]1038
[276]1039            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]1040
[276]1041        """
[484]1042        varlist = vars()
[276]1043        inf = list(self._getcoordinfo())
1044        inf[2] = doppler
1045        self._setcoordinfo(inf)
[1118]1046        self._add_history("set_doppler", vars())
[710]1047
[1862]1048    @asaplog_post_dec
[226]1049    def set_freqframe(self, frame=None):
[1846]1050        """\
[113]1051        Set the frame type of the Spectral Axis.
[1846]1052
[113]1053        Parameters:
[1846]1054
[591]1055            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]1056                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]1057                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]1058
1059        Example::
1060
[113]1061            scan.set_freqframe('BARY')
[1846]1062
[113]1063        """
[1593]1064        frame = frame or rcParams['scantable.freqframe']
[484]1065        varlist = vars()
[1819]1066        # "REST" is not implemented in casacore
1067        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1068        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
1069        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]1070                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]1071
[989]1072        if frame in valid:
[113]1073            inf = list(self._getcoordinfo())
1074            inf[1] = frame
1075            self._setcoordinfo(inf)
[1118]1076            self._add_history("set_freqframe", varlist)
[102]1077        else:
[1118]1078            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
[1859]1079            raise TypeError(msg)
[710]1080
[1862]1081    @asaplog_post_dec
[989]1082    def set_dirframe(self, frame=""):
[1846]1083        """\
[989]1084        Set the frame type of the Direction on the sky.
[1846]1085
[989]1086        Parameters:
[1846]1087
[989]1088            frame:   an optional frame type, default ''. Valid frames are:
1089                     'J2000', 'B1950', 'GALACTIC'
[1846]1090
1091        Example:
1092
[989]1093            scan.set_dirframe('GALACTIC')
[1846]1094
[989]1095        """
1096        varlist = vars()
[1859]1097        Scantable.set_dirframe(self, frame)
[1118]1098        self._add_history("set_dirframe", varlist)
[989]1099
[113]1100    def get_unit(self):
[1846]1101        """\
[113]1102        Get the default unit set in this scantable
[1846]1103
[113]1104        Returns:
[1846]1105
[113]1106            A unit string
[1846]1107
[113]1108        """
1109        inf = self._getcoordinfo()
1110        unit = inf[0]
1111        if unit == '': unit = 'channel'
1112        return unit
[102]1113
[1862]1114    @asaplog_post_dec
[158]1115    def get_abcissa(self, rowno=0):
[1846]1116        """\
[158]1117        Get the abcissa in the current coordinate setup for the currently
[113]1118        selected Beam/IF/Pol
[1846]1119
[113]1120        Parameters:
[1846]1121
[226]1122            rowno:    an optional row number in the scantable. Default is the
1123                      first row, i.e. rowno=0
[1846]1124
[113]1125        Returns:
[1846]1126
[1348]1127            The abcissa values and the format string (as a dictionary)
[1846]1128
[113]1129        """
[256]1130        abc = self._getabcissa(rowno)
[710]1131        lbl = self._getabcissalabel(rowno)
[158]1132        return abc, lbl
[113]1133
[1862]1134    @asaplog_post_dec
[2322]1135    def flag(self, mask=None, unflag=False, row=-1):
[1846]1136        """\
[1001]1137        Flag the selected data using an optional channel mask.
[1846]1138
[1001]1139        Parameters:
[1846]1140
[1001]1141            mask:   an optional channel mask, created with create_mask. Default
1142                    (no mask) is all channels.
[1855]1143
[1819]1144            unflag:    if True, unflag the data
[1846]1145
[2322]1146            row:    an optional row number in the scantable.
1147                      Default -1 flags all rows
1148                     
[1001]1149        """
1150        varlist = vars()
[1593]1151        mask = mask or []
[1994]1152        self._flag(row, mask, unflag)
[1001]1153        self._add_history("flag", varlist)
1154
[1862]1155    @asaplog_post_dec
[2322]1156    def flag_row(self, rows=None, unflag=False):
[1846]1157        """\
[1819]1158        Flag the selected data in row-based manner.
[1846]1159
[1819]1160        Parameters:
[1846]1161
[1843]1162            rows:   list of row numbers to be flagged. Default is no row
[2322]1163                    (must be explicitly specified to execute row-based
1164                    flagging).
[1855]1165
[1819]1166            unflag: if True, unflag the data.
[1846]1167
[1819]1168        """
1169        varlist = vars()
[2322]1170        if rows is None:
1171            rows = []
[1859]1172        self._flag_row(rows, unflag)
[1819]1173        self._add_history("flag_row", varlist)
1174
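    # Flagging sketch ('scan' is an assumed scantable; illustrative only):
    #
    #     msk = scan.create_mask([400, 500])   # channels 400-500
    #     scan.flag(mask=msk)                  # flag those channels in all rows
    #     scan.flag_row([0, 3])                # flag rows 0 and 3 entirely
    #     scan.flag_row([0, 3], unflag=True)   # undo the row flagging
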
[1862]1175    @asaplog_post_dec
[1819]1176    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1177        """\
[1819]1178        Flag the selected data outside a specified range (in channel-base)
[1846]1179
[1819]1180        Parameters:
[1846]1181
[1819]1182            uthres:      upper threshold.
[1855]1183
[1819]1184            dthres:      lower threshold
[1846]1185
[2322]1186            clipoutside: True for flagging data outside the range
1187                         [dthres:uthres].
[1928]1188                         False for flagging data inside the range.
[1855]1189
[1846]1190            unflag:      if True, unflag the data.
1191
[1819]1192        """
1193        varlist = vars()
[1859]1194        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1195        self._add_history("clip", varlist)
1196
[1862]1197    @asaplog_post_dec
[1584]1198    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1199        """\
[1192]1200        Flag the data in 'lag' space by providing a frequency to remove.
[2177]1201        Flagged data in the scantable get interpolated over the region.
[1192]1202        No taper is applied.
[1846]1203
[1192]1204        Parameters:
[1846]1205
[1579]1206            start:    the start frequency (really a period within the
1207                      bandwidth)  or period to remove
[1855]1208
[1579]1209            end:      the end frequency or period to remove
[1855]1210
[1584]1211            unit:     the frequency unit (default "MHz") or "" for
[1579]1212                      explicit lag channels
[1846]1213
1214        *Notes*:
1215
[1579]1216            It is recommended to flag edges of the band or strong
[1348]1217            signals beforehand.
[1846]1218
[1192]1219        """
1220        if insitu is None: insitu = rcParams['insitu']
1221        self._math._setinsitu(insitu)
1222        varlist = vars()
[1579]1223        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1224        if not (unit == "" or base.has_key(unit)):
[1192]1225            raise ValueError("%s is not a valid unit." % unit)
[1859]1226        if unit == "":
1227            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1228        else:
1229            s = scantable(self._math._lag_flag(self, start*base[unit],
1230                                               end*base[unit], "frequency"))
[1192]1231        s._add_history("lag_flag", varlist)
1232        if insitu:
1233            self._assign(s)
1234        else:
1235            return s
[1001]1236
[1862]1237    @asaplog_post_dec
[2186]1238    def fft(self, rowno=[], mask=[], getrealimag=False):
[2177]1239        """\
1240        Apply FFT to the spectra.
1241        Flagged data in the scantable get interpolated over the region.
1242
1243        Parameters:
[2186]1244
1245            rowno:          The row number(s) to be processed. int, list
1246                            and tuple are accepted. By default ([]), FFT
1247                            is applied to the whole data.
1248
1249            mask:           Auxiliary channel mask(s). Given as a boolean
1250                            list, it is applied to all specified rows.
1251                            A list of boolean lists can also be used to
1252                            apply different masks. In the latter case, the
1253                            length of 'mask' must be the same as that of
1254                            'rowno'. The default is [].
[2177]1255       
1256            getrealimag:    If True, returns the real and imaginary part
1257                            values of the complex results.
1258                            If False (the default), returns the amplitude
1259                            (absolute value) normalised with Ndata/2 and
1260                            phase (argument, in unit of radian).
1261
1262        Returns:
1263
[2186]1264            A list of dictionaries containing the results for each spectrum.
1265            Each dictionary contains two values, the real and the imaginary
1266            parts when getrealimag = True, or the amplitude(absolute value)
1267            and the phase(argument) when getrealimag = False. The key for
1268            these values are 'real' and 'imag', or 'ampl' and 'phase',
[2177]1269            respectively.
1270        """
1271        if isinstance(rowno, int):
1272            rowno = [rowno]
1273        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
[2186]1274            raise TypeError("The row number(s) must be int, list or tuple.")
1275
1276        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1277
1278        if not (isinstance(mask, list) or isinstance(mask, tuple)):
1279            raise TypeError("The mask must be a boolean list or a list of boolean list.")
1280        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1281        if isinstance(mask[0], bool): mask = [mask]
1282        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1283            raise TypeError("The mask must be a boolean list or a list of boolean lists.")
1284
1285        usecommonmask = (len(mask) == 1)
1286        if not usecommonmask:
1287            if len(mask) != len(rowno):
1288                raise ValueError("When specifying masks for each spectrum, the number of masks must equal the number of spectra.")
1289        for amask in mask:
1290            if len(amask) != self.nchan():
1291                raise ValueError("The spectra and the mask have different length.")
[2177]1292       
[2186]1293        res = []
1294
1295        imask = 0
1296        for whichrow in rowno:
1297            fspec = self._fft(whichrow, mask[imask], getrealimag)
1298            nspec = len(fspec)
[2177]1299           
[2186]1300            i=0
1301            v1=[]
1302            v2=[]
1303            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
[2177]1304           
[2186]1305            while (i < nspec):
1306                v1.append(fspec[i])
1307                v2.append(fspec[i+1])
1308                i+=2
1309           
[2177]1310            if getrealimag:
[2186]1311                reselem["real"]  += v1
1312                reselem["imag"]  += v2
[2177]1313            else:
[2186]1314                reselem["ampl"]  += v1
1315                reselem["phase"] += v2
[2177]1316           
[2186]1317            res.append(reselem)
1318           
1319            if not usecommonmask: imask += 1
1320       
[2177]1321        return res
1322
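    # Sketch of reading back fft() results ('scan' is assumed, illustrative):
    #
    #     res = scan.fft(rowno=[0])      # default getrealimag=False
    #     ampl  = res[0]['ampl']         # normalised amplitudes for row 0
    #     phase = res[0]['phase']        # phases in radian
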
1323    @asaplog_post_dec
[113]1324    def create_mask(self, *args, **kwargs):
[1846]1325        """\
[1118]1326        Compute and return a mask based on [min, max] windows.
[189]1327        The specified windows are to be INCLUDED when the mask is
[113]1328        applied.
[1846]1329
[102]1330        Parameters:
[1846]1331
[1118]1332            [min, max], [min2, max2], ...
[1024]1333                Pairs of start/end points (inclusive) specifying the regions
[102]1334                to be masked
[1855]1335
[189]1336            invert:     optional argument. If specified as True,
1337                        return an inverted mask, i.e. the regions
1338                        specified are EXCLUDED
[1855]1339
[513]1340            row:        create the mask using the specified row for
1341                        unit conversions, default is row=0
1342                        only necessary if frequency varies over rows.
[1846]1343
1344        Examples::
1345
[113]1346            scan.set_unit('channel')
[1846]1347            # a)
[1118]1348            msk = scan.create_mask([400, 500], [800, 900])
[189]1349            # masks everything outside 400 and 500
[113]1350            # and 800 and 900 in the unit 'channel'
1351
[1846]1352            # b)
[1118]1353            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1354            # masks the regions between 400 and 500
[113]1355            # and 800 and 900 in the unit 'channel'
[1846]1356
1357            # c)
1358            #mask only channel 400
[1554]1359            msk =  scan.create_mask([400])
[1846]1360
[102]1361        """
[1554]1362        row = kwargs.get("row", 0)
[513]1363        data = self._getabcissa(row)
[113]1364        u = self._getcoordinfo()[0]
[1859]1365        if u == "":
1366            u = "channel"
1367        msg = "The current mask window unit is %s" % u
1368        i = self._check_ifs()
1369        if not i:
1370            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1371        asaplog.push(msg)
[102]1372        n = self.nchan()
[1295]1373        msk = _n_bools(n, False)
[710]1374        # test if args is a 'list' or a 'normal *args - UGLY!!!
1375
[1118]1376        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1377             and args or args[0]
[710]1378        for window in ws:
[1554]1379            if len(window) == 1:
1380                window = [window[0], window[0]]
1381            if len(window) == 0 or  len(window) > 2:
1382                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1383            if window[0] > window[1]:
1384                tmp = window[0]
1385                window[0] = window[1]
1386                window[1] = tmp
[102]1387            for i in range(n):
[1024]1388                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1389                    msk[i] = True
[113]1390        if kwargs.has_key('invert'):
1391            if kwargs.get('invert'):
[1295]1392                msk = mask_not(msk)
[102]1393        return msk
[710]1394
[1931]1395    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1396        """\
[1819]1397        Compute and return a list of mask windows, [min, max].
[1846]1398
[1819]1399        Parameters:
[1846]1400
[1819]1401            mask:       channel mask, created with create_mask.
[1855]1402
[1819]1403            row:        calculate the masklist using the specified row
1404                        for unit conversions, default is row=0
1405                        only necessary if frequency varies over rows.
[1846]1406
[1819]1407        Returns:
[1846]1408
[1819]1409            [min, max], [min2, max2], ...
1410                Pairs of start/end points (inclusive) specifying
1411                the masked regions
[1846]1412
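        Example (illustrative sketch; assumes your scantable is called scan
        and its channel count matches the mask)::

            msk = scan.create_mask([400, 500], [800, 900])
            # recover the [min, max] windows from the boolean mask
            print scan.get_masklist(msk)
            # e.g. [[400.0, 500.0], [800.0, 900.0]] when the unit is 'channel'
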
[1819]1413        """
1414        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1415            raise TypeError("The mask should be list or tuple.")
1416        if len(mask) < 2:
1417            raise TypeError("The mask should contain at least 2 elements")
1418        if self.nchan() != len(mask):
1419            msg = "Number of channels in scantable != number of mask elements"
1420            raise TypeError(msg)
1421        data = self._getabcissa(row)
1422        u = self._getcoordinfo()[0]
[1859]1423        if u == "":
1424            u = "channel"
1425        msg = "The current mask window unit is %s" % u
1426        i = self._check_ifs()
1427        if not i:
1428            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1429        if not silent:
1430            asaplog.push(msg)
[1819]1431        masklist=[]
1432        ist, ien = None, None
1433        ist, ien=self.get_mask_indices(mask)
1434        if ist is not None and ien is not None:
1435            for i in xrange(len(ist)):
1436                range=[data[ist[i]],data[ien[i]]]
1437                range.sort()
1438                masklist.append([range[0],range[1]])
1439        return masklist
1440
1441    def get_mask_indices(self, mask=None):
[1846]1442        """\
[1819]1443        Compute and return lists of mask start indices and mask end indices.
[1855]1444
1445        Parameters:
1446
[1819]1447            mask:       channel mask, created with create_mask.
[1846]1448
[1819]1449        Returns:
[1846]1450
[1819]1451            List of mask start indices and that of mask end indices,
1452            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1453
[1819]1454        """
1455        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1456            raise TypeError("The mask should be list or tuple.")
1457        if len(mask) < 2:
1458            raise TypeError("The mask should contain at least 2 elements")
1459        istart=[]
1460        iend=[]
1461        if mask[0]: istart.append(0)
1462        for i in range(len(mask)-1):
1463            if not mask[i] and mask[i+1]:
1464                istart.append(i+1)
1465            elif mask[i] and not mask[i+1]:
1466                iend.append(i)
1467        if mask[len(mask)-1]: iend.append(len(mask)-1)
1468        if len(istart) != len(iend):
1469            raise RuntimeError("Numbers of mask start != mask end.")
1470        for i in range(len(istart)):
1471            if istart[i] > iend[i]:
1472                raise RuntimeError("Mask start index > mask end index")
1474        return istart,iend
1475
[2013]1476    @asaplog_post_dec
1477    def parse_maskexpr(self,maskstring):
1478        """
1479        Parse CASA type mask selection syntax (IF dependent).
1480
1481        Parameters:
1482            maskstring : A string mask selection expression.
1483                         Comma-separated selections specify different IF -
1484                         channel combinations. IFs and channel selections
1485                         are separated by a colon, ':'.
1486                     examples:
[2015]1487                         ''          = all IFs (all channels)
[2013]1488                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
1489                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1490                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1491                                       all channels in IF8
1492        Returns:
1493        A dictionary of selected (valid) IF and masklist pairs,
1494        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
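
        Example (illustrative sketch; assumes your scantable is called scan
        and the spectral unit is 'channel'):
            seldict = scan.parse_maskexpr('0~1:100~200,3')
            # e.g. {'0': [[100.0, 200.0]], '1': [[100.0, 200.0]],
            #       '3': [[0, nchan]]} where nchan is the channel count of IF3
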
1495        """
1496        if not isinstance(maskstring,str):
1497            asaplog.post()
1498            asaplog.push("Invalid mask expression")
1499            asaplog.post("ERROR")
1500       
1501        valid_ifs = self.getifnos()
1502        frequnit = self.get_unit()
1503        seldict = {}
[2015]1504        if maskstring == "":
1505            maskstring = str(valid_ifs)[1:-1]
[2013]1506        ## split each selection
1507        sellist = maskstring.split(',')
1508        for currselstr in sellist:
1509            selset = currselstr.split(':')
1510            # spw and mask string (may include ~, < or >)
1511            spwmasklist = self._parse_selection(selset[0],typestr='integer',
1512                                               offset=1,minval=min(valid_ifs),
1513                                               maxval=max(valid_ifs))
1514            selspws = []
1515            for spwlist in spwmasklist:
1516                for ispw in range(spwlist[0],spwlist[1]+1):
1517                    # Put into the list only if ispw exists
1518                    if valid_ifs.count(ispw):
1519                        selspws.append(ispw)
1520            del spwmasklist, spwlist
1521
1522            # parse frequency mask list
1523            if len(selset) > 1:
1524                freqmasklist = self._parse_selection(selset[1],typestr='float',
1525                                                    offset=0.)
1526            else:
1527                # want to select the whole spectrum
1528                freqmasklist = [None]
1529
1530            ## define a dictionary of spw - masklist combination
1531            for ispw in selspws:
1532                #print "working on", ispw
1533                spwstr = str(ispw)
1534                if len(selspws) == 0:
1535                    # empty spw
1536                    continue
1537                else:
1538                    ## want to get min and max of the spw and
1539                    ## offset to set for '<' and '>'
1540                    if frequnit == 'channel':
1541                        minfreq = 0
1542                        maxfreq = self.nchan(ifno=ispw)
1543                        offset = 0.5
1544                    else:
1545                        ## This is the ugly part; needs improvement.
1546                        for ifrow in xrange(self.nrow()):
1547                            if self.getif(ifrow) == ispw:
1548                                #print "IF",ispw,"found in row =",ifrow
1549                                break
1550                        freqcoord = self.get_coordinate(ifrow)
1551                        freqs = self._getabcissa(ifrow)
1552                        minfreq = min(freqs)
1553                        maxfreq = max(freqs)
1554                        if len(freqs) == 1:
1555                            offset = 0.5
1556                        elif frequnit.find('Hz') > 0:
1557                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1558                                      -freqcoord.to_frequency(0,unit=frequnit))*0.5
1559                        elif frequnit.find('m/s') > 0:
1560                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1561                                      -freqcoord.to_velocity(0,unit=frequnit))*0.5
1562                        else:
1563                            asaplog.post()
1564                            asaplog.push("Invalid frequency unit")
1565                            asaplog.post("ERROR")
1566                        del freqs, freqcoord, ifrow
1567                    for freq in freqmasklist:
1568                        selmask = freq or [minfreq, maxfreq]
1569                        if selmask[0] == None:
1570                            ## selection was "<freq[1]".
1571                            if selmask[1] < minfreq:
1572                                ## avoid adding region selection
1573                                selmask = None
1574                            else:
1575                                selmask = [minfreq,selmask[1]-offset]
1576                        elif selmask[1] == None:
1577                            ## selection was ">freq[0]"
1578                            if selmask[0] > maxfreq:
1579                                ## avoid adding region selection
1580                                selmask = None
1581                            else:
1582                                selmask = [selmask[0]+offset,maxfreq]
1583                        if selmask:
1584                            if not seldict.has_key(spwstr):
1585                                # new spw selection
1586                                seldict[spwstr] = []
1587                            seldict[spwstr] += [selmask]
1588                    del minfreq,maxfreq,offset,freq,selmask
1589                del spwstr
1590            del freqmasklist
1591        del valid_ifs
1592        if len(seldict) == 0:
1593            asaplog.post()
1594            asaplog.push("No valid selection in the mask expression: "+maskstring)
1595            asaplog.post("WARN")
1596            return None
1597        msg = "Selected masklist:\n"
1598        for sif, lmask in seldict.iteritems():
1599            msg += "   IF"+sif+" - "+str(lmask)+"\n"
1600        asaplog.push(msg)
1601        return seldict
1602
1603    def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1604        """
1605        Parameters:
1606            selstr :    The Selection string, e.g., '<3;5~7;100~103;9'
1607            typestr :   The type of the values in returned list
1608                        ('integer' or 'float')
1609            offset :    The offset value to subtract from or add to
1610                        the boundary value if the selection string
1611                        includes '<' or '>'
1612            minval, maxval :  The minimum/maximum values to set if the
1613                              selection string includes '<' or '>'.
1614                              The list element is filled with None by default.
1615        Returns:
1616            A list of min/max pair of selections.
1617        Example:
1618            _parse_selection('<3;5~7;9',typestr='int',offset=1,minval=0)
1619            returns [[0,2],[5,7],[9,9]]
1620        """
1621        selgroups = selstr.split(';')
1622        sellists = []
1623        if typestr.lower().startswith('int'):
1624            formatfunc = int
1625        else:
1626            formatfunc = float
1627       
1628        for currsel in  selgroups:
1629            if currsel.find('~') > 0:
1630                minsel = formatfunc(currsel.split('~')[0].strip())
1631                maxsel = formatfunc(currsel.split('~')[1].strip())
1632            elif currsel.strip().startswith('<'):
1633                minsel = minval
1634                maxsel = formatfunc(currsel.split('<')[1].strip()) \
1635                         - formatfunc(offset)
1636            elif currsel.strip().startswith('>'):
1637                minsel = formatfunc(currsel.split('>')[1].strip()) \
1638                         + formatfunc(offset)
1639                maxsel = maxval
1640            else:
1641                minsel = formatfunc(currsel)
1642                maxsel = formatfunc(currsel)
1643            sellists.append([minsel,maxsel])
1644        return sellists
1645
[1819]1646#    def get_restfreqs(self):
1647#        """
1648#        Get the restfrequency(s) stored in this scantable.
1649#        The return value(s) are always of unit 'Hz'
1650#        Parameters:
1651#            none
1652#        Returns:
1653#            a list of doubles
1654#        """
1655#        return list(self._getrestfreqs())
1656
1657    def get_restfreqs(self, ids=None):
[1846]1658        """\
[256]1659        Get the restfrequency(s) stored in this scantable.
1660        The return value(s) are always of unit 'Hz'
[1846]1661
[256]1662        Parameters:
[1846]1663
[1819]1664            ids: (optional) a list of MOLECULE_IDs for which the restfrequency(s)
1665                 are to be retrieved
[1846]1666
[256]1667        Returns:
[1846]1668
[1819]1669            dictionary containing ids and a list of doubles for each id
[1846]1670
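        Example (illustrative sketch; assumes your scantable is called scan)::

            # all rest frequencies, keyed by MOLECULE_ID
            print scan.get_restfreqs()
            # rest frequencies for selected MOLECULE_IDs only
            print scan.get_restfreqs(ids=[0, 1])
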
[256]1671        """
[1819]1672        if ids is None:
1673            rfreqs={}
1674            idlist = self.getmolnos()
1675            for i in idlist:
1676                rfreqs[i]=list(self._getrestfreqs(i))
1677            return rfreqs
1678        else:
1679            if type(ids)==list or type(ids)==tuple:
1680                rfreqs={}
1681                for i in ids:
1682                    rfreqs[i]=list(self._getrestfreqs(i))
1683                return rfreqs
1684            else:
1685                return list(self._getrestfreqs(ids))
1686            #return list(self._getrestfreqs(ids))
[102]1687
[931]1688    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1689        """\
[931]1690        Set or replace the restfrequency(s) specified.
[1938]1691        If the 'freqs' argument holds a scalar,
[931]1692        then that rest frequency will be applied to all the selected
1693        data.  If the 'freqs' argument holds
1694        a vector, then it MUST be of equal or smaller length than
1695        the number of IFs (and the available restfrequencies will be
1696        replaced by this vector).  In this case, *all* data have
1697        the restfrequency set per IF according
1698        to the corresponding value you give in the 'freqs' vector.
[1118]1699        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1700        IF 1 gets restfreq 2e9.
[1846]1701
[1395]1702        You can also specify the frequencies via a linecatalog.
[1153]1703
[931]1704        Parameters:
[1846]1705
[931]1706            freqs:   list of rest frequency values or string identifiers
[1855]1707
[931]1708            unit:    unit for rest frequency (default 'Hz')
[402]1709
[1846]1710
1711        Example::
1712
[1819]1713            # set the given restfrequency for the all currently selected IFs
[931]1714            scan.set_restfreqs(freqs=1.4e9)
[1845]1715            # set restfrequencies for the n IFs  (n > 1) in the order of the
1716            # list, i.e
1717            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1718            # len(list_of_restfreqs) == nIF
1719            # for nIF == 1 the following will set multiple restfrequency for
1720            # that IF
[1819]1721            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1722            # set multiple restfrequencies per IF, as a list of lists where
1723            # the outer list has nIF elements and the inner lists are of arbitrary length
1724            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1725
[1846]1726       *Note*:
[1845]1727
[931]1728            To do more sophisticated restfrequency setting, e.g. on a
1729            source and IF basis, use scantable.set_selection() before using
[1846]1730            this function::
[931]1731
[1846]1732                # provided your scantable is called scan
1733                selection = selector()
1734                selection.set_name("ORION*")
1735                selection.set_ifs([1])
1736                scan.set_selection(selection)
1737                scan.set_restfreqs(freqs=86.6e9)
1738
[931]1739        """
1740        varlist = vars()
[1157]1741        from asap import linecatalog
1742        # simple  value
[1118]1743        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1744            self._setrestfreqs([freqs], [""], unit)
[1157]1745        # list of values
[1118]1746        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1747            # list values are scalars
[1118]1748            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1749                if len(freqs) == 1:
1750                    self._setrestfreqs(freqs, [""], unit)
1751                else:
1752                    # allow the 'old' mode of setting multiple IFs
1753                    sel = selector()
1754                    savesel = self._getselection()
1755                    iflist = self.getifnos()
1756                    if len(freqs)>len(iflist):
1757                        raise ValueError("number of elements in the list "
1758                                         "exceeds the current IF selections")
1760                    for i, fval in enumerate(freqs):
1761                        sel.set_ifs(iflist[i])
1762                        self._setselection(sel)
1763                        self._setrestfreqs([fval], [""], unit)
1764                    self._setselection(savesel)
1765
1766            # list values are dicts: {'value': ..., 'name': ...}
[1157]1767            elif isinstance(freqs[-1], dict):
[1845]1768                values = []
1769                names = []
1770                for d in freqs:
1771                    values.append(d["value"])
1772                    names.append(d["name"])
1773                self._setrestfreqs(values, names, unit)
[1819]1774            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1775                sel = selector()
1776                savesel = self._getselection()
[1322]1777                iflist = self.getifnos()
[1819]1778                if len(freqs)>len(iflist):
[1845]1779                    raise ValueError("number of elements in list of list exceeds"
1780                                     " the current IF selections")
1781                for i, fval in enumerate(freqs):
[1322]1782                    sel.set_ifs(iflist[i])
[1259]1783                    self._setselection(sel)
[1845]1784                    self._setrestfreqs(fval, [""], unit)
[1157]1785                self._setselection(savesel)
1786        # freqs are to be taken from a linecatalog
[1153]1787        elif isinstance(freqs, linecatalog):
1788            sel = selector()
1789            savesel = self._getselection()
1790            for i in xrange(freqs.nrow()):
[1322]1791                sel.set_ifs(iflist[i])
[1153]1792                self._setselection(sel)
[1845]1793                self._setrestfreqs([freqs.get_frequency(i)],
1794                                   [freqs.get_name(i)], "MHz")
[1153]1795                # ensure that we are not iterating past nIF
1796                if i == self.nif()-1: break
1797            self._setselection(savesel)
[931]1798        else:
1799            return
1800        self._add_history("set_restfreqs", varlist)
1801
[1360]1802    def shift_refpix(self, delta):
[1846]1803        """\
[1589]1804        Shift the reference pixel of the Spectra Coordinate by an
1805        integer amount.
[1846]1806
[1589]1807        Parameters:
[1846]1808
[1589]1809            delta:   the amount to shift by
[1846]1810
1811        *Note*:
1812
[1589]1813            Be careful using this with broadband data.
[1846]1814
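        Example (illustrative sketch; assumes your scantable is called scan)::

            # shift the reference pixel up by two channels
            scan.shift_refpix(2)
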
[1360]1815        """
[1731]1816        Scantable.shift_refpix(self, delta)
[931]1817
[1862]1818    @asaplog_post_dec
[1259]1819    def history(self, filename=None):
[1846]1820        """\
[1259]1821        Print the history. Optionally to a file.
[1846]1822
[1348]1823        Parameters:
[1846]1824
[1928]1825            filename:    The name of the file to save the history to.
[1846]1826
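        Example (illustrative sketch; assumes your scantable is called scan)::

            scan.history()                            # page through on screen
            scan.history('scantable_history.txt')     # write to a text file
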
[1259]1827        """
[484]1828        hist = list(self._gethistory())
[794]1829        out = "-"*80
[484]1830        for h in hist:
[489]1831            if h.startswith("---"):
[1857]1832                out = "\n".join([out, h])
[489]1833            else:
1834                items = h.split("##")
1835                date = items[0]
1836                func = items[1]
1837                items = items[2:]
[794]1838                out += "\n"+date+"\n"
1839                out += "Function: %s\n  Parameters:" % (func)
[489]1840                for i in items:
[1938]1841                    if i == '':
1842                        continue
[489]1843                    s = i.split("=")
[1118]1844                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1845                out = "\n".join([out, "-"*80])
[1259]1846        if filename is not None:
1847            if filename == "":
1848                filename = 'scantable_history.txt'
1849            import os
1850            filename = os.path.expandvars(os.path.expanduser(filename))
1851            if not os.path.isdir(filename):
1852                data = open(filename, 'w')
1853                data.write(out)
1854                data.close()
1855            else:
1856                msg = "Illegal file name '%s'." % (filename)
[1859]1857                raise IOError(msg)
1858        return page(out)
[513]1859    #
1860    # Maths business
1861    #
[1862]1862    @asaplog_post_dec
[931]1863    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1864        """\
[1070]1865        Return the (time) weighted average of a scan.
[1846]1866
1867        *Note*:
1868
[1070]1869            in channels only - align if necessary
[1846]1870
[513]1871        Parameters:
[1846]1872
[513]1873            mask:     an optional mask (only used for 'var' and 'tsys'
1874                      weighting)
[1855]1875
[558]1876            scanav:   True averages each scan separately
1877                      False (default) averages all scans together,
[1855]1878
[1099]1879            weight:   Weighting scheme.
1880                      'none'     (plain mean, no weighting)
1881                      'var'      (1/var(spec) weighted)
1882                      'tsys'     (1/Tsys**2 weighted)
1883                      'tint'     (integration time weighted)
1884                      'tintsys'  (Tint/Tsys**2)
1885                      'median'   (median averaging)
[535]1886                      The default is 'tint'
[1855]1887
[931]1888            align:    align the spectra in velocity before averaging. It takes
1889                      the time of the first spectrum as reference time.
[1846]1890
1891        Example::
1892
[513]1893            # time average the scantable without using a mask
[710]1894            newscan = scan.average_time()
[1846]1895
[513]1896        """
1897        varlist = vars()
[1593]1898        weight = weight or 'TINT'
1899        mask = mask or ()
1900        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1901        scan = (self, )
[1859]1902
1903        if align:
1904            scan = (self.freq_align(insitu=False), )
1905        s = None
1906        if weight.upper() == 'MEDIAN':
1907            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1908                                                     scanav))
1909        else:
1910            s = scantable(self._math._average(scan, mask, weight.upper(),
1911                          scanav))
[1099]1912        s._add_history("average_time", varlist)
[513]1913        return s
[710]1914
[1862]1915    @asaplog_post_dec
[876]1916    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1917        """\
[513]1918        Return a scan where all spectra are converted to either
1919        Jansky or Kelvin depending upon the flux units of the scan table.
1920        By default the function tries to look the values up internally.
1921        If it can't find them (or if you want to override), you must
1922        specify EITHER jyperk OR eta (and D which it will try to look up
1923        also if you don't set it). jyperk takes precedence if you set both.
[1846]1924
[513]1925        Parameters:
[1846]1926
[513]1927            jyperk:      the Jy / K conversion factor
[1855]1928
[513]1929            eta:         the aperture efficiency
[1855]1930
[1928]1931            d:           the geometric diameter (metres)
[1855]1932
[513]1933            insitu:      if False a new scantable is returned.
1934                         Otherwise, the scaling is done in-situ
1935                         The default is taken from .asaprc (False)
[1846]1936
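        Example (illustrative sketch; assumes your scantable is called scan;
        the numbers are arbitrary)::

            # use an explicit Jy/K factor
            s_jy = scan.convert_flux(jyperk=12.5, insitu=False)
            # or give the aperture efficiency and dish diameter instead
            s_jy = scan.convert_flux(eta=0.58, d=22.0, insitu=False)
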
[513]1937        """
1938        if insitu is None: insitu = rcParams['insitu']
[876]1939        self._math._setinsitu(insitu)
[513]1940        varlist = vars()
[1593]1941        jyperk = jyperk or -1.0
1942        d = d or -1.0
1943        eta = eta or -1.0
[876]1944        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1945        s._add_history("convert_flux", varlist)
1946        if insitu: self._assign(s)
1947        else: return s
[513]1948
[1862]1949    @asaplog_post_dec
[876]1950    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1951        """\
[513]1952        Return a scan after applying a gain-elevation correction.
1953        The correction can be made via either a polynomial or a
1954        table-based interpolation (and extrapolation if necessary).
1955        You specify polynomial coefficients, an ascii table or neither.
1956        If you specify neither, then a polynomial correction will be made
1957        with built-in coefficients known for certain telescopes (an error
1958        will occur if the instrument is not known).
1959        The data and Tsys are *divided* by the scaling factors.
[1846]1960
[513]1961        Parameters:
[1846]1962
[513]1963            poly:        Polynomial coefficients (default None) to compute a
1964                         gain-elevation correction as a function of
1965                         elevation (in degrees).
[1855]1966
[513]1967            filename:    The name of an ascii file holding correction factors.
1968                         The first row of the ascii file must give the column
1969                         names and these MUST include columns
1970                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1971                         by this) somewhere.
1972                         The second row must give the data type of the
1973                         column. Use 'R' for Real and 'I' for Integer.
1974                         An example file would be
1975                         (actual factors are arbitrary) :
1976
1977                         TIME ELEVATION FACTOR
1978                         R R R
1979                         0.1 0 0.8
1980                         0.2 20 0.85
1981                         0.3 40 0.9
1982                         0.4 60 0.85
1983                         0.5 80 0.8
1984                         0.6 90 0.75
[1855]1985
[513]1986            method:      Interpolation method when correcting from a table.
1987                         Values are  "nearest", "linear" (default), "cubic"
1988                         and "spline"
[1855]1989
[513]1990            insitu:      if False a new scantable is returned.
1991                         Otherwise, the scaling is done in-situ
1992                         The default is taken from .asaprc (False)
[1846]1993
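        Example (illustrative sketch; assumes your scantable is called scan
        and a correction table 'gainel.txt' in the format described above;
        the polynomial coefficients are arbitrary)::

            # polynomial gain-elevation correction
            scan.gain_el(poly=[0.5, 0.01, 0.0])
            # or interpolate correction factors from an ascii table
            scan.gain_el(filename='gainel.txt', method='linear')
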
[513]1994        """
1995
1996        if insitu is None: insitu = rcParams['insitu']
[876]1997        self._math._setinsitu(insitu)
[513]1998        varlist = vars()
[1593]1999        poly = poly or ()
[513]2000        from os.path import expandvars
2001        filename = expandvars(filename)
[876]2002        s = scantable(self._math._gainel(self, poly, filename, method))
2003        s._add_history("gain_el", varlist)
[1593]2004        if insitu:
2005            self._assign(s)
2006        else:
2007            return s
[710]2008
[1862]2009    @asaplog_post_dec
[931]2010    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]2011        """\
[513]2012        Return a scan where all rows have been aligned in frequency/velocity.
2013        The alignment frequency frame (e.g. LSRK) is that set by function
2014        set_freqframe.
[1846]2015
[513]2016        Parameters:
[1855]2017
[513]2018            reftime:     reference time to align at. By default, the time of
2019                         the first row of data is used.
[1855]2020
[513]2021            method:      Interpolation method for regridding the spectra.
2022                         Choose from "nearest", "linear", "cubic" (default)
2023                         and "spline"
[1855]2024
[513]2025            insitu:      if False a new scantable is returned.
2026                         Otherwise, the scaling is done in-situ
2027                         The default is taken from .asaprc (False)
[1846]2028
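        Example (illustrative sketch; assumes your scantable is called scan)::

            # align all rows to the time of the first row, then average
            aligned = scan.freq_align(insitu=False)
            avg = aligned.average_time()
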
[513]2029        """
[931]2030        if insitu is None: insitu = rcParams["insitu"]
[876]2031        self._math._setinsitu(insitu)
[513]2032        varlist = vars()
[1593]2033        reftime = reftime or ""
[931]2034        s = scantable(self._math._freq_align(self, reftime, method))
[876]2035        s._add_history("freq_align", varlist)
2036        if insitu: self._assign(s)
2037        else: return s
[513]2038
[1862]2039    @asaplog_post_dec
[1725]2040    def opacity(self, tau=None, insitu=None):
[1846]2041        """\
[513]2042        Apply an opacity correction. The data
2043        and Tsys are multiplied by the correction factor.
[1846]2044
[513]2045        Parameters:
[1855]2046
[1689]2047            tau:         (list of) opacity from which the correction factor is
[513]2048                         exp(tau*ZD)
[1689]2049                         where ZD is the zenith-distance.
2050                         If a list is provided, it has to be of length nIF,
2051                         nIF*nPol or 1 and in order of IF/POL, e.g.
2052                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]2053                         if tau is `None` the opacities are determined from a
2054                         model.
[1855]2055
[513]2056            insitu:      if False a new scantable is returned.
2057                         Otherwise, the scaling is done in-situ
2058                         The default is taken from .asaprc (False)
[1846]2059
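        Example (illustrative sketch; assumes your scantable is called scan;
        the opacity values are arbitrary)::

            # a single zenith opacity for all IFs/polarisations
            scan.opacity(tau=0.09)
            # one opacity per IF (length nIF, in IF order)
            scan.opacity(tau=[0.09, 0.08])
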
[513]2060        """
2061        if insitu is None: insitu = rcParams['insitu']
[876]2062        self._math._setinsitu(insitu)
[513]2063        varlist = vars()
[1689]2064        if not hasattr(tau, "__len__"):
2065            tau = [tau]
[876]2066        s = scantable(self._math._opacity(self, tau))
2067        s._add_history("opacity", varlist)
2068        if insitu: self._assign(s)
2069        else: return s
[513]2070
[1862]2071    @asaplog_post_dec
[513]2072    def bin(self, width=5, insitu=None):
[1846]2073        """\
[513]2074        Return a scan where all spectra have been binned up.
[1846]2075
[1348]2076        Parameters:
[1846]2077
[513]2078            width:       The bin width (default=5) in pixels
[1855]2079
[513]2080            insitu:      if False a new scantable is returned.
2081                         Otherwise, the scaling is done in-situ
2082                         The default is taken from .asaprc (False)
[1846]2083
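        Example (illustrative sketch; assumes your scantable is called scan)::

            # bin up by a factor of 10 channels, returning a new scantable
            binned = scan.bin(width=10, insitu=False)
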
[513]2084        """
2085        if insitu is None: insitu = rcParams['insitu']
[876]2086        self._math._setinsitu(insitu)
[513]2087        varlist = vars()
[876]2088        s = scantable(self._math._bin(self, width))
[1118]2089        s._add_history("bin", varlist)
[1589]2090        if insitu:
2091            self._assign(s)
2092        else:
2093            return s
[513]2094
[1862]2095    @asaplog_post_dec
[513]2096    def resample(self, width=5, method='cubic', insitu=None):
[1846]2097        """\
[1348]2098        Return a scan where all spectra have been resampled (interpolated onto a new channel grid).
[1573]2099
[1348]2100        Parameters:
[1846]2101
[513]2102            width:       The bin width (default=5) in pixels
[1855]2103
[513]2104            method:      Interpolation method for the resampling.
2105                         Values are  "nearest", "linear", "cubic" (default)
2106                         and "spline"
[1855]2107
[513]2108            insitu:      if False a new scantable is returned.
2109                         Otherwise, the scaling is done in-situ
2110                         The default is taken from .asaprc (False)
[1846]2111
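        Example (illustrative sketch; assumes your scantable is called scan)::

            # resample by a factor of 5 using cubic interpolation
            res = scan.resample(width=5, method='cubic', insitu=False)
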
[513]2112        """
2113        if insitu is None: insitu = rcParams['insitu']
[876]2114        self._math._setinsitu(insitu)
[513]2115        varlist = vars()
[876]2116        s = scantable(self._math._resample(self, method, width))
[1118]2117        s._add_history("resample", varlist)
[876]2118        if insitu: self._assign(s)
2119        else: return s
[513]2120
[1862]2121    @asaplog_post_dec
[946]2122    def average_pol(self, mask=None, weight='none'):
[1846]2123        """\
[946]2124        Average the Polarisations together.
[1846]2125
[946]2126        Parameters:
[1846]2127
[946]2128            mask:        An optional mask defining the region, where the
2129                         averaging will be applied. The output will have all
2130                         specified points masked.
[1855]2131
[946]2132            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2133                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2134
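        Example (illustrative sketch; assumes your scantable is called scan)::

            # plain (unweighted) average of the polarisations
            iscan = scan.average_pol(weight='none')
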
[946]2135        """
2136        varlist = vars()
[1593]2137        mask = mask or ()
[1010]2138        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]2139        s._add_history("average_pol", varlist)
[992]2140        return s
[513]2141
[1862]2142    @asaplog_post_dec
[1145]2143    def average_beam(self, mask=None, weight='none'):
[1846]2144        """\
[1145]2145        Average the Beams together.
[1846]2146
[1145]2147        Parameters:
2148            mask:        An optional mask defining the region, where the
2149                         averaging will be applied. The output will have all
2150                         specified points masked.
[1855]2151
[1145]2152            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2153                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]2154
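        Example (illustrative sketch; assumes your scantable is called scan)::

            # Tsys-weighted average of the beams
            bscan = scan.average_beam(weight='tsys')
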
[1145]2155        """
2156        varlist = vars()
[1593]2157        mask = mask or ()
[1145]2158        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2159        s._add_history("average_beam", varlist)
2160        return s
2161
[1586]2162    def parallactify(self, pflag):
[1846]2163        """\
[1843]2164        Set a flag to indicate whether this data should be treated as having
[1617]2165        been 'parallactified' (total phase == 0.0)
[1846]2166
[1617]2167        Parameters:
[1855]2168
[1843]2169            pflag:  Bool indicating whether to turn this on (True) or
[1617]2170                    off (False)
[1846]2171
[1617]2172        """
[1586]2173        varlist = vars()
2174        self._parallactify(pflag)
2175        self._add_history("parallactify", varlist)
2176
[1862]2177    @asaplog_post_dec
[992]2178    def convert_pol(self, poltype=None):
[1846]2179        """\
[992]2180        Convert the data to a different polarisation type.
[1565]2181        Note that you will need cross-polarisation terms for most conversions.
[1846]2182
[992]2183        Parameters:
[1855]2184
[992]2185            poltype:    The new polarisation type. Valid types are:
[1565]2186                        "linear", "circular", "stokes" and "linpol"
[1846]2187
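        Example (illustrative sketch; assumes your scantable is called scan
        and contains the required cross-polarisation products)::

            stokes_scan = scan.convert_pol('stokes')
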
[992]2188        """
2189        varlist = vars()
[1859]2190        s = scantable(self._math._convertpol(self, poltype))
[1118]2191        s._add_history("convert_pol", varlist)
[992]2192        return s
2193
[1862]2194    @asaplog_post_dec
[2269]2195    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False,
2196               insitu=None):
[1846]2197        """\
[513]2198        Smooth the spectrum by the specified kernel (conserving flux).
[1846]2199
[513]2200        Parameters:
[1846]2201
[513]2202            kernel:     The type of smoothing kernel. Select from
[1574]2203                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
2204                        or 'poly'
[1855]2205
[513]2206            width:      The width of the kernel in pixels. For hanning this is
2207                        ignored, otherwise it defaults to 5 pixels.
2208                        For 'gaussian' it is the Full Width Half
2209                        Maximum. For 'boxcar' it is the full width.
[1574]2210                        For 'rmedian' and 'poly' it is the half width.
[1855]2211
[1574]2212            order:      Optional parameter for 'poly' kernel (default is 2), to
2213                        specify the order of the polynomial. Ignored by all other
2214                        kernels.
[1855]2215
[1819]2216            plot:       plot the original and the smoothed spectra.
2217                        In this mode each individual fit has to be approved,
2218                        by typing 'y' or 'n'
[1855]2219
[513]2220            insitu:     if False a new scantable is returned.
2221                        Otherwise, the scaling is done in-situ
2222                        The default is taken from .asaprc (False)
[1846]2223
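        Example (illustrative sketch; assumes your scantable is called scan)::

            # 5-channel boxcar smoothing, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)
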
[513]2224        """
2225        if insitu is None: insitu = rcParams['insitu']
[876]2226        self._math._setinsitu(insitu)
[513]2227        varlist = vars()
[1819]2228
2229        if plot: orgscan = self.copy()
2230
[1574]2231        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]2232        s._add_history("smooth", varlist)
[1819]2233
2234        if plot:
[2150]2235            from asap.asapplotter import new_asaplot
2236            theplot = new_asaplot(rcParams['plotter.gui'])
2237            theplot.set_panels()
[1819]2238            ylab=s._get_ordinate_label()
[2150]2239            #theplot.palette(0,["#777777","red"])
[1819]2240            for r in xrange(s.nrow()):
2241                xsm=s._getabcissa(r)
2242                ysm=s._getspectrum(r)
2243                xorg=orgscan._getabcissa(r)
2244                yorg=orgscan._getspectrum(r)
[2150]2245                theplot.clear()
2246                theplot.hold()
2247                theplot.set_axes('ylabel',ylab)
2248                theplot.set_axes('xlabel',s._getabcissalabel(r))
2249                theplot.set_axes('title',s._getsourcename(r))
2250                theplot.set_line(label='Original',color="#777777")
2251                theplot.plot(xorg,yorg)
2252                theplot.set_line(label='Smoothed',color="red")
2253                theplot.plot(xsm,ysm)
[1819]2254                ### Ugly part for legend
2255                for i in [0,1]:
[2150]2256                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
2257                theplot.release()
[1819]2258                ### Ugly part for legend
[2150]2259                theplot.subplots[0]['lines']=[]
[1819]2260                res = raw_input("Accept smoothing ([y]/n): ")
2261                if res.upper() == 'N':
2262                    s._setspectrum(yorg, r)
[2150]2263            theplot.quit()
2264            del theplot
[1819]2265            del orgscan
2266
[876]2267        if insitu: self._assign(s)
2268        else: return s
[513]2269
[2186]2270    @asaplog_post_dec
2271    def _parse_wn(self, wn):
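        # Helper: normalise an addwn/rejwn specification (an int, a
        # list/tuple of ints, or a range-style string such as 'a-b', '<a',
        # '>=a', 'a<=', ...) into an explicit list of wave numbers. Ranges
        # that are open towards high wave numbers extend up to the Nyquist
        # wave number, nchan()/2.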
2272        if isinstance(wn, list) or isinstance(wn, tuple):
2273            return wn
2274        elif isinstance(wn, int):
2275            return [ wn ]
2276        elif isinstance(wn, str):
[2277]2277            if '-' in wn:                            # case 'a-b' : return [a,a+1,...,b-1,b]
[2186]2278                val = wn.split('-')
2279                val = [int(val[0]), int(val[1])]
2280                val.sort()
2281                res = [i for i in xrange(val[0], val[1]+1)]
[2277]2282            elif wn[:2] == '<=' or wn[:2] == '=<':   # cases '<=a','=<a' : return [0,1,...,a-1,a]
[2186]2283                val = int(wn[2:])+1
2284                res = [i for i in xrange(val)]
[2277]2285            elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
[2186]2286                val = int(wn[:-2])+1
2287                res = [i for i in xrange(val)]
[2277]2288            elif wn[0] == '<':                       # case '<a' :         return [0,1,...,a-2,a-1]
[2186]2289                val = int(wn[1:])
2290                res = [i for i in xrange(val)]
[2277]2291            elif wn[-1] == '>':                      # case 'a>' :         return [0,1,...,a-2,a-1]
[2186]2292                val = int(wn[:-1])
2293                res = [i for i in xrange(val)]
[2277]2294            elif wn[:2] == '>=' or wn[:2] == '=>':   # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
[2186]2295                val = int(wn[2:])
2296                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2297            elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
[2186]2298                val = int(wn[:-2])
2299                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2300            elif wn[0] == '>':                       # case '>a' :         return [a+1,a+2,...,a_nyq]
[2186]2301                val = int(wn[1:])+1
2302                res = [i for i in xrange(val, self.nchan()/2+1)]
[2277]2303            elif wn[-1] == '<':                      # case 'a<' :         return [a+1,a+2,...,a_nyq]
[2186]2304                val = int(wn[:-1])+1
2305                res = [i for i in xrange(val, self.nchan()/2+1)]
            else:                                    # case 'a' : treat a bare number string as a single wave number
                res = [int(wn)]
[2012]2306
[2186]2307            return res
2308        else:
2309            msg = 'wrong value given for addwn/rejwn'
2310            raise RuntimeError(msg)
2311
2312
[1862]2313    @asaplog_post_dec
[2277]2314    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None,
[2269]2315                          fftmethod=None, fftthresh=None,
2316                          addwn=None, rejwn=None, clipthresh=None,
2317                          clipniter=None, plot=None,
2318                          getresidual=None, showprogress=None,
2319                          minnrow=None, outlog=None, blfile=None):
[2047]2320        """\
[2094]2321        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2322        Parameters:
[2186]2323            insitu:        if False a new scantable is returned.
[2081]2324                           Otherwise, the scaling is done in-situ
2325                           The default is taken from .asaprc (False)
[2186]2326            mask:          an optional mask
2327            applyfft:      if True use some method, such as FFT, to find
2328                           strongest sinusoidal components in the wavenumber
2329                           domain to be used for baseline fitting.
2330                           default is True.
2331            fftmethod:     method to find the strong sinusoidal components.
2332                           now only 'fft' is available and it is the default.
2333            fftthresh:     the threshold to select wave numbers to be used for
2334                           fitting from the distribution of amplitudes in the
2335                           wavenumber domain.
2336                           both float and string values accepted.
2337                           given a float value, the unit is set to sigma.
2338                           for string values, allowed formats include:
2339                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2340                               'topx' (= the x strongest ones, e.g. 'top5').
2341                           default is 3.0 (unit: sigma).
2342            addwn:         the additional wave numbers to be used for fitting.
2343                           list or integer value is accepted to specify every
2344                           wave numbers. also string value can be used in case
2345                           you need to specify wave numbers in a certain range,
2346                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2347                                 '<a'  (= 0,1,...,a-2,a-1),
2348                                 '>=a' (= a, a+1, ... up to the maximum wave
2349                                        number corresponding to the Nyquist
2350                                        frequency for the case of FFT).
2351                           default is [].
2352            rejwn:         the wave numbers NOT to be used for fitting.
2353                           can be set just as addwn but has higher priority:
2354                           wave numbers which are specified both in addwn
2355                           and rejwn will NOT be used. default is [].
[2081]2356            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
[2129]2357            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2081]2358            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2359                           plot the fit and the residual. In this each
2360                           indivual fit has to be approved, by typing 'y'
2361                           or 'n'
2362            getresidual:   if False, returns best-fit values instead of
2363                           residual. (default is True)
[2189]2364            showprogress:  show progress status for large data.
2365                           default is True.
2366            minnrow:       minimum number of input spectra to show.
2367                           default is 1000.
[2081]2368            outlog:        Output the coefficients of the best-fit
2369                           function to logger (default is False)
2370            blfile:        Name of a text file in which the best-fit
2371                           parameter values to be written
[2186]2372                           (default is '': no file/logger output)
[2047]2373
2374        Example:
2375            # return a scan baselined by a combination of sinusoidal curves having
[2081]2376            # wave numbers in spectral window up to 10,
[2047]2377            # also with 3-sigma clipping, iteration up to 4 times
[2186]2378            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
[2081]2379
2380        Note:
2381            The best-fit parameter values output in logger and/or blfile are now
2382            based on specunit of 'channel'.
[2047]2383        """
2384       
[2186]2385        try:
2386            varlist = vars()
[2047]2387       
[2186]2388            if insitu is None: insitu = rcParams['insitu']
2389            if insitu:
2390                workscan = self
2391            else:
2392                workscan = self.copy()
2393           
[2277]2394            if mask          is None: mask          = [True for i in xrange(workscan.nchan())]
[2186]2395            if applyfft      is None: applyfft      = True
2396            if fftmethod     is None: fftmethod     = 'fft'
2397            if fftthresh     is None: fftthresh     = 3.0
2398            if addwn         is None: addwn         = []
2399            if rejwn         is None: rejwn         = []
2400            if clipthresh    is None: clipthresh    = 3.0
2401            if clipniter     is None: clipniter     = 0
2402            if plot          is None: plot          = False
2403            if getresidual   is None: getresidual   = True
[2189]2404            if showprogress  is None: showprogress  = True
2405            if minnrow       is None: minnrow       = 1000
[2186]2406            if outlog        is None: outlog        = False
2407            if blfile        is None: blfile        = ''
[2047]2408
[2081]2409            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
[2277]2410            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(),
                str(fftthresh).lower(), workscan._parse_wn(addwn),
                workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual,
                pack_progress_params(showprogress, minnrow), outlog, blfile)
[2186]2411            workscan._add_history('sinusoid_baseline', varlist)
[2047]2412           
2413            if insitu:
2414                self._assign(workscan)
2415            else:
2416                return workscan
2417           
2418        except RuntimeError, e:
[2186]2419            raise_fitting_failure_exception(e)
[2047]2420
2421
[2186]2422    @asaplog_post_dec
[2277]2423    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2424                               addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2425                               chan_avg_limit=None, plot=None, getresidual=None, showprogress=None, minnrow=None,
[2189]2426                               outlog=None, blfile=None):
[2047]2427        """\
[2277]2428        Return a scan which has been baselined (all rows) with sinusoidal functions.
[2047]2429        Spectral lines are detected first using linefinder and masked out
2430        to avoid them affecting the baseline solution.
2431
2432        Parameters:
[2189]2433            insitu:         if False a new scantable is returned.
2434                            Otherwise, the scaling is done in-situ
2435                            The default is taken from .asaprc (False)
2436            mask:           an optional mask retrieved from scantable
2437            applyfft:       if True use some method, such as FFT, to find
2438                            strongest sinusoidal components in the wavenumber
2439                            domain to be used for baseline fitting.
2440                            default is True.
2441            fftmethod:      method to find the strong sinusoidal components.
2442                            now only 'fft' is available and it is the default.
2443            fftthresh:      the threshold to select wave numbers to be used for
2444                            fitting from the distribution of amplitudes in the
2445                            wavenumber domain.
2446                            both float and string values accepted.
2447                            given a float value, the unit is set to sigma.
2448                            for string values, allowed formats include:
[2277]2449                                'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
[2189]2450                                'topx' (= the x strongest ones, e.g. 'top5').
2451                            default is 3.0 (unit: sigma).
2452            addwn:          the additional wave numbers to be used for fitting.
2453                            list or integer value is accepted to specify every
2454                            wave numbers. also string value can be used in case
2455                            you need to specify wave numbers in a certain range,
2456                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2457                                  '<a'  (= 0,1,...,a-2,a-1),
2458                                  '>=a' (= a, a+1, ... up to the maximum wave
2459                                         number corresponding to the Nyquist
2460                                         frequency for the case of FFT).
2461                            default is [].
2462            rejwn:          the wave numbers NOT to be used for fitting.
2463                            can be set just as addwn but has higher priority:
2464                            wave numbers which are specified both in addwn
2465                            and rejwn will NOT be used. default is [].
2466            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
[2277]2467            clipniter:      maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2468            edge:           an optional number of channel to drop at
2469                            the edge of spectrum. If only one value is
2470                            specified, the same number will be dropped
2471                            from both sides of the spectrum. Default
2472                            is to keep all channels. Nested tuples
2473                            represent individual edge selection for
2474                            different IFs (a number of spectral channels
2475                            can be different)
2476            threshold:      the threshold used by line finder. It is
2477                            better to keep it large as only strong lines
2478                            affect the baseline solution.
2479            chan_avg_limit: a maximum number of consecutive spectral
2480                            channels to average during the search of
2481                            weak and broad lines. The default is no
2482                            averaging (and no search for weak lines).
2483                            If such lines can affect the fitted baseline
2484                            (e.g. a high order polynomial is fitted),
2485                            increase this parameter (usually values up
2486                            to 8 are reasonable). Most users of this
2487                            method should find the default value sufficient.
2488            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2489                            plot the fit and the residual. In this mode each
2490                            individual fit has to be approved, by typing 'y'
2491                            or 'n'
2492            getresidual:    if False, returns best-fit values instead of
2493                            residual. (default is True)
2494            showprogress:   show progress status for large data.
2495                            default is True.
2496            minnrow:        minimum number of input spectra to show.
2497                            default is 1000.
2498            outlog:         Output the coefficients of the best-fit
2499                            function to logger (default is False)
2500            blfile:         Name of a text file in which the best-fit
2501                            parameter values to be written
2502                            (default is "": no file/logger output)
[2047]2503
2504        Example:
[2186]2505            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
[2081]2506       
2507        Note:
2508            The best-fit parameter values output in logger and/or blfile are now
2509            based on specunit of 'channel'.
[2047]2510        """
2511
[2186]2512        try:
2513            varlist = vars()
[2047]2514
[2186]2515            if insitu is None: insitu = rcParams['insitu']
2516            if insitu:
2517                workscan = self
[2047]2518            else:
[2186]2519                workscan = self.copy()
2520           
[2277]2521            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2522            if applyfft       is None: applyfft       = True
2523            if fftmethod      is None: fftmethod      = 'fft'
2524            if fftthresh      is None: fftthresh      = 3.0
2525            if addwn          is None: addwn          = []
2526            if rejwn          is None: rejwn          = []
2527            if clipthresh     is None: clipthresh     = 3.0
2528            if clipniter      is None: clipniter      = 0
2529            if edge           is None: edge           = (0,0)
2530            if threshold      is None: threshold      = 3
2531            if chan_avg_limit is None: chan_avg_limit = 1
2532            if plot           is None: plot           = False
2533            if getresidual    is None: getresidual    = True
[2189]2534            if showprogress   is None: showprogress   = True
2535            if minnrow        is None: minnrow        = 1000
[2186]2536            if outlog         is None: outlog         = False
2537            if blfile         is None: blfile         = ''
[2047]2538
[2277]2539            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2540            workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, pack_progress_params(showprogress, minnrow), outlog, blfile)
[2047]2541            workscan._add_history("auto_sinusoid_baseline", varlist)
2542           
2543            if insitu:
2544                self._assign(workscan)
2545            else:
2546                return workscan
2547           
2548        except RuntimeError, e:
[2186]2549            raise_fitting_failure_exception(e)
[2047]2550
2551    @asaplog_post_dec
[2277]2552    def cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2553                         plot=None, getresidual=None, showprogress=None, minnrow=None, outlog=None, blfile=None):
[1846]2554        """\
[2277]2555        Return a scan which has been baselined (all rows) by a cubic spline function (piecewise cubic polynomial).

[513]2556        Parameters:
[2189]2557            insitu:       If False a new scantable is returned.
2558                          Otherwise, the scaling is done in-situ
2559                          The default is taken from .asaprc (False)
2560            mask:         An optional mask
2561            npiece:       Number of pieces. (default is 2)
2562            clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
[2277]2563            clipniter:    maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2564            plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2565                          plot the fit and the residual. In this case
2566                          each individual fit has to be approved by
2567                          typing 'y' or 'n'.
2568            getresidual:  if False, returns best-fit values instead of
2569                          residual. (default is True)
2570            showprogress: show progress status for large data.
2571                          default is True.
2572            minnrow:      minimum number of input spectra to show.
2573                          default is 1000.
2574            outlog:       Output the coefficients of the best-fit
2575                          function to logger (default is False)
2576            blfile:       Name of a text file in which the best-fit
2577                          parameter values to be written
2578                          (default is "": no file/logger output)
[1846]2579
[2012]2580        Example:
[2277]2581            # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
[2012]2582            # also with 3-sigma clipping, iteration up to 4 times
2583            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
[2081]2584       
2585        Note:
2586            The best-fit parameter values output in logger and/or blfile are now
2587            based on specunit of 'channel'.
[2012]2588        """
2589       
[2186]2590        try:
2591            varlist = vars()
2592           
2593            if insitu is None: insitu = rcParams['insitu']
2594            if insitu:
2595                workscan = self
2596            else:
2597                workscan = self.copy()
[1855]2598
[2277]2599            if mask         is None: mask         = [True for i in xrange(workscan.nchan())]
[2189]2600            if npiece       is None: npiece       = 2
2601            if clipthresh   is None: clipthresh   = 3.0
2602            if clipniter    is None: clipniter    = 0
2603            if plot         is None: plot         = False
2604            if getresidual  is None: getresidual  = True
2605            if showprogress is None: showprogress = True
2606            if minnrow      is None: minnrow      = 1000
2607            if outlog       is None: outlog       = False
2608            if blfile       is None: blfile       = ''
[1855]2609
[2012]2610            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2277]2611            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter,
                                       getresidual,
                                       pack_progress_params(showprogress,
                                                            minnrow),
                                       outlog, blfile)
[2012]2612            workscan._add_history("cspline_baseline", varlist)
2613           
2614            if insitu:
2615                self._assign(workscan)
2616            else:
2617                return workscan
2618           
2619        except RuntimeError, e:
[2186]2620            raise_fitting_failure_exception(e)
[1855]2621
[2186]2622    @asaplog_post_dec
[2277]2623    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None, clipniter=None,
2624                              edge=None, threshold=None, chan_avg_limit=None, getresidual=None, plot=None,
2625                              showprogress=None, minnrow=None, outlog=None, blfile=None):
[2012]2626        """\
2627        Return a scan which has been baselined (all rows) by cubic spline
2628        function (piecewise cubic polynomial).
2629        Spectral lines are detected first using linefinder and masked out
2630        to avoid them affecting the baseline solution.
2631
2632        Parameters:
[2189]2633            insitu:         if False a new scantable is returned.
2634                            Otherwise, the scaling is done in-situ
2635                            The default is taken from .asaprc (False)
2636            mask:           an optional mask retrieved from scantable
2637            npiece:         Number of pieces. (default is 2)
2638            clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
[2277]2639            clipniter:      maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
[2189]2640            edge:           an optional number of channels to drop at
2641                            the edge of the spectrum. If only one value is
2642                            specified, the same number will be dropped
2643                            from both sides of the spectrum. Default
2644                            is to keep all channels. Nested tuples
2645                            represent individual edge selection for
2646                            different IFs (the number of spectral channels
2647                            may differ between IFs).
2648            threshold:      the threshold used by line finder. It is
2649                            better to keep it large as only strong lines
2650                            affect the baseline solution.
2651            chan_avg_limit: a maximum number of consecutive spectral
2652                            channels to average during the search of
2653                            weak and broad lines. The default is no
2654                            averaging (and no search for weak lines).
2655                            If such lines can affect the fitted baseline
2656                            (e.g. a high order polynomial is fitted),
2657                            increase this parameter (usually values up
2658                            to 8 are reasonable). Most users of this
2659                            method should find the default value sufficient.
2660            plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2661                            plot the fit and the residual. In this case
2662                            each individual fit has to be approved by
2663                            typing 'y' or 'n'.
2664            getresidual:    if False, returns best-fit values instead of
2665                            residual. (default is True)
2666            showprogress:   show progress status for large data.
2667                            default is True.
2668            minnrow:        minimum number of input spectra to show.
2669                            default is 1000.
2670            outlog:         Output the coefficients of the best-fit
2671                            function to logger (default is False)
2672            blfile:         Name of a text file in which the best-fit
2673                            parameter values to be written
2674                            (default is "": no file/logger output)
[1846]2675
[1907]2676        Example:
[2012]2677            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
[2081]2678       
2679        Note:
2680            The best-fit parameter values output in logger and/or blfile are now
2681            based on specunit of 'channel'.
[2012]2682        """
[1846]2683
[2186]2684        try:
2685            varlist = vars()
[2012]2686
[2186]2687            if insitu is None: insitu = rcParams['insitu']
2688            if insitu:
2689                workscan = self
[1391]2690            else:
[2186]2691                workscan = self.copy()
2692           
[2277]2693            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2694            if npiece         is None: npiece         = 2
2695            if clipthresh     is None: clipthresh     = 3.0
2696            if clipniter      is None: clipniter      = 0
2697            if edge           is None: edge           = (0, 0)
2698            if threshold      is None: threshold      = 3
2699            if chan_avg_limit is None: chan_avg_limit = 1
2700            if plot           is None: plot           = False
2701            if getresidual    is None: getresidual    = True
[2189]2702            if showprogress   is None: showprogress   = True
2703            if minnrow        is None: minnrow        = 1000
[2186]2704            if outlog         is None: outlog         = False
2705            if blfile         is None: blfile         = ''
[1819]2706
[2277]2707            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
[2269]2708            workscan._auto_cspline_baseline(mask, npiece, clipthresh,
2709                                            clipniter,
2710                                            normalise_edge_param(edge),
2711                                            threshold,
2712                                            chan_avg_limit, getresidual,
2713                                            pack_progress_params(showprogress,
2714                                                                 minnrow),
2715                                            outlog, blfile)
[2012]2716            workscan._add_history("auto_cspline_baseline", varlist)
[1907]2717           
[1856]2718            if insitu:
2719                self._assign(workscan)
2720            else:
2721                return workscan
[2012]2722           
2723        except RuntimeError, e:
[2186]2724            raise_fitting_failure_exception(e)
[513]2725
[1931]2726    @asaplog_post_dec
[2269]2727    def poly_baseline(self, mask=None, order=None, insitu=None, plot=None,
2728                      getresidual=None, showprogress=None, minnrow=None,
2729                      outlog=None, blfile=None):
[1907]2730        """\
2731        Return a scan which has been baselined (all rows) by a polynomial.

2732        Parameters:
[2189]2733            insitu:       if False a new scantable is returned.
2734                          Otherwise, the scaling is done in-situ
2735                          The default is taken from .asaprc (False)
2736            mask:         an optional mask
2737            order:        the order of the polynomial (default is 0)
2738            plot:         plot the fit and the residual. In this case each
2739                          individual fit has to be approved by typing 'y'
2740                          or 'n'.
2741            getresidual:  if False, returns best-fit values instead of
2742                          residual. (default is True)
2743            showprogress: show progress status for large data.
2744                          default is True.
2745            minnrow:      minimum number of input spectra to show.
2746                          default is 1000.
2747            outlog:       Output the coefficients of the best-fit
2748                          function to logger (default is False)
2749            blfile:       Name of a text file in which the best-fit
2750                          parameter values to be written
2751                          (default is "": no file/logger output)
[2012]2752
[1907]2753        Example:
2754            # return a scan baselined by a third order polynomial,
2755            # not using a mask
2756            bscan = scan.poly_baseline(order=3)
2757        """
[1931]2758       
[2186]2759        try:
2760            varlist = vars()
[1931]2761       
[2269]2762            if insitu is None:
2763                insitu = rcParams["insitu"]
[2186]2764            if insitu:
2765                workscan = self
2766            else:
2767                workscan = self.copy()
[1907]2768
[2277]2769            if mask         is None: mask         = [True for i in \
2770                                                       xrange(workscan.nchan())]
[2189]2771            if order        is None: order        = 0
2772            if plot         is None: plot         = False
2773            if getresidual  is None: getresidual  = True
2774            if showprogress is None: showprogress = True
2775            if minnrow      is None: minnrow      = 1000
2776            if outlog       is None: outlog       = False
2777            if blfile       is None: blfile       = ""
[1907]2778
[2012]2779            if plot:
[2269]2780                outblfile = (blfile != "") and \
2781                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2782                if outblfile:
2783                    blf = open(blfile, "a")
[2012]2784               
[1907]2785                f = fitter()
2786                f.set_function(lpoly=order)
[2186]2787               
2788                rows = xrange(workscan.nrow())
2789                #if len(rows) > 0: workscan._init_blinfo()
2790               
[1907]2791                for r in rows:
2792                    f.x = workscan._getabcissa(r)
2793                    f.y = workscan._getspectrum(r)
2794                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2795                    f.data = None
2796                    f.fit()
2797                   
2798                    f.plot(residual=True)
2799                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2800                    if accept_fit.upper() == "N":
[2012]2801                        #workscan._append_blinfo(None, None, None)
[1907]2802                        continue
[2012]2803                   
2804                    blpars = f.get_parameters()
2805                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2806                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2269]2807                    workscan._setspectrum((f.fitter.getresidual()
2808                                           if getresidual else f.fitter.getfit()), r)
[1907]2809                   
[2012]2810                    if outblfile:
2811                        rms = workscan.get_rms(f.mask, r)
[2269]2812                        dataout = \
2813                            workscan.format_blparams_row(blpars["params"],
2814                                                         blpars["fixed"],
2815                                                         rms, str(masklist),
2816                                                         r, True)
[2012]2817                        blf.write(dataout)
2818
[1907]2819                f._p.unmap()
2820                f._p = None
[2012]2821
2822                if outblfile: blf.close()
[1907]2823            else:
[2269]2824                workscan._poly_baseline(mask, order, getresidual,
2825                                        pack_progress_params(showprogress,
2826                                                             minnrow),
2827                                        outlog, blfile)
[1907]2828           
2829            workscan._add_history("poly_baseline", varlist)
2830           
2831            if insitu:
2832                self._assign(workscan)
2833            else:
2834                return workscan
2835           
[1919]2836        except RuntimeError, e:
[2186]2837            raise_fitting_failure_exception(e)
[1907]2838
[2186]2839    @asaplog_post_dec
[2269]2840    def auto_poly_baseline(self, mask=None, order=None, edge=None,
2841                           threshold=None, chan_avg_limit=None,
2842                           plot=None, insitu=None,
2843                           getresidual=None, showprogress=None,
2844                           minnrow=None, outlog=None, blfile=None):
[1846]2845        """\
[1931]2846        Return a scan which has been baselined (all rows) by a polynomial.
[880]2847        Spectral lines are detected first using linefinder and masked out
2848        to avoid them affecting the baseline solution.
2849
2850        Parameters:
[2189]2851            insitu:         if False a new scantable is returned.
2852                            Otherwise, the scaling is done in-situ
2853                            The default is taken from .asaprc (False)
2854            mask:           an optional mask retrieved from scantable
2855            order:          the order of the polynomial (default is 0)
2856            edge:           an optional number of channels to drop at
2857                            the edge of the spectrum. If only one value is
2858                            specified, the same number will be dropped
2859                            from both sides of the spectrum. Default
2860                            is to keep all channels. Nested tuples
2861                            represent individual edge selection for
2862                            different IFs (the number of spectral channels
2863                            may differ between IFs).
2864            threshold:      the threshold used by line finder. It is
2865                            better to keep it large as only strong lines
2866                            affect the baseline solution.
2867            chan_avg_limit: a maximum number of consecutive spectral
2868                            channels to average during the search of
2869                            weak and broad lines. The default is no
2870                            averaging (and no search for weak lines).
2871                            If such lines can affect the fitted baseline
2872                            (e.g. a high order polynomial is fitted),
2873                            increase this parameter (usually values up
2874                            to 8 are reasonable). Most users of this
2875                            method should find the default value sufficient.
2876            plot:           plot the fit and the residual. In this case each
2877                            individual fit has to be approved by typing 'y'
2878                            or 'n'.
2879            getresidual:    if False, returns best-fit values instead of
2880                            residual. (default is True)
2881            showprogress:   show progress status for large data.
2882                            default is True.
2883            minnrow:        minimum number of input spectra to show.
2884                            default is 1000.
2885            outlog:         Output the coefficients of the best-fit
2886                            function to logger (default is False)
2887            blfile:         Name of a text file in which the best-fit
2888                            parameter values to be written
2889                            (default is "": no file/logger output)
[1846]2890
[2012]2891        Example:
2892            bscan = scan.auto_poly_baseline(order=7, insitu=False)
2893        """
[880]2894
[2186]2895        try:
2896            varlist = vars()
[1846]2897
[2269]2898            if insitu is None:
2899                insitu = rcParams['insitu']
[2186]2900            if insitu:
2901                workscan = self
2902            else:
2903                workscan = self.copy()
[1846]2904
[2277]2905            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
[2186]2906            if order          is None: order          = 0
2907            if edge           is None: edge           = (0, 0)
2908            if threshold      is None: threshold      = 3
2909            if chan_avg_limit is None: chan_avg_limit = 1
2910            if plot           is None: plot           = False
2911            if getresidual    is None: getresidual    = True
[2189]2912            if showprogress   is None: showprogress   = True
2913            if minnrow        is None: minnrow        = 1000
[2186]2914            if outlog         is None: outlog         = False
2915            if blfile         is None: blfile         = ''
[1846]2916
[2186]2917            edge = normalise_edge_param(edge)
[880]2918
[2012]2919            if plot:
[2269]2920                outblfile = (blfile != "") and \
2921                    os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
[2012]2922                if outblfile: blf = open(blfile, "a")
2923               
[2186]2924                from asap.asaplinefind import linefinder
[2012]2925                fl = linefinder()
[2269]2926                fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
[2012]2927                fl.set_scan(workscan)
[2186]2928               
[2012]2929                f = fitter()
2930                f.set_function(lpoly=order)
[880]2931
[2186]2932                rows = xrange(workscan.nrow())
2933                #if len(rows) > 0: workscan._init_blinfo()
2934               
[2012]2935                for r in rows:
[2186]2936                    idx = 2*workscan.getif(r)
[2269]2937                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)),
2938                                  edge[idx:idx+2])  # (CAS-1434)
[907]2939
[2012]2940                    f.x = workscan._getabcissa(r)
2941                    f.y = workscan._getspectrum(r)
2942                    f.mask = fl.get_mask()
2943                    f.data = None
2944                    f.fit()
2945
2946                    f.plot(residual=True)
2947                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2948                    if accept_fit.upper() == "N":
2949                        #workscan._append_blinfo(None, None, None)
2950                        continue
2951
2952                    blpars = f.get_parameters()
2953                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2954                    #workscan._append_blinfo(blpars, masklist, f.mask)
[2269]2955                    workscan._setspectrum((f.fitter.getresidual() \
2956                                               if getresidual else f.fitter.getfit()), r)
[2012]2957
2958                    if outblfile:
2959                        rms = workscan.get_rms(f.mask, r)
[2269]2960                        dataout = \
2961                            workscan.format_blparams_row(blpars["params"],
2962                                                         blpars["fixed"],
2963                                                         rms, str(masklist),
2964                                                         r, True)
[2012]2965                        blf.write(dataout)
2966                   
2967                f._p.unmap()
2968                f._p = None
2969
2970                if outblfile: blf.close()
2971            else:
[2269]2972                workscan._auto_poly_baseline(mask, order, edge, threshold,
2973                                             chan_avg_limit, getresidual,
2974                                             pack_progress_params(showprogress,
2975                                                                  minnrow),
2976                                             outlog, blfile)
[2012]2977
2978            workscan._add_history("auto_poly_baseline", varlist)
2979           
2980            if insitu:
2981                self._assign(workscan)
2982            else:
2983                return workscan
2984           
2985        except RuntimeError, e:
[2186]2986            raise_fitting_failure_exception(e)
[2012]2987
2988    def _init_blinfo(self):
2989        """\
2990        Initialise the following three auxiliary members:
2991           blpars : parameters of the best-fit baseline,
2992           masklists : mask data (edge positions of masked channels) and
2993           actualmask : mask data (in boolean list),
2994        to keep for use later (including output to logger/text files).
2995        Used by poly_baseline() and auto_poly_baseline() in case of
2996        'plot=True'.
2997        """
2998        self.blpars = []
2999        self.masklists = []
3000        self.actualmask = []
3001        return
[880]3002
[2012]3003    def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
3004        """\
3005        Append baseline-fitting related info to blpars, masklist and
3006        actualmask.
3007        """
3008        self.blpars.append(data_blpars)
3009        self.masklists.append(data_masklists)
3010        self.actualmask.append(data_actualmask)
3011        return
3012       
[1862]3013    @asaplog_post_dec
[914]3014    def rotate_linpolphase(self, angle):
[1846]3015        """\
[914]3016        Rotate the phase of the complex polarization O=Q+iU correlation.
3017        This is always done in situ in the raw data.  So if you call this
3018        function more than once then each call rotates the phase further.
[1846]3019
[914]3020        Parameters:
[1846]3021
[914]3022            angle:   The angle (degrees) to rotate (add) by.
[1846]3023
3024        Example::
3025
[914]3026            scan.rotate_linpolphase(2.3)
[1846]3027
[914]3028        """
3029        varlist = vars()
[936]3030        self._math._rotate_linpolphase(self, angle)
[914]3031        self._add_history("rotate_linpolphase", varlist)
3032        return
[710]3033
[1862]3034    @asaplog_post_dec
[914]3035    def rotate_xyphase(self, angle):
[1846]3036        """\
[914]3037        Rotate the phase of the XY correlation.  This is always done in situ
3038        in the data.  So if you call this function more than once
3039        then each call rotates the phase further.
[1846]3040
[914]3041        Parameters:
[1846]3042
[914]3043            angle:   The angle (degrees) to rotate (add) by.
[1846]3044
3045        Example::
3046
[914]3047            scan.rotate_xyphase(2.3)
[1846]3048
[914]3049        """
3050        varlist = vars()
[936]3051        self._math._rotate_xyphase(self, angle)
[914]3052        self._add_history("rotate_xyphase", varlist)
3053        return
3054
[1862]3055    @asaplog_post_dec
[914]3056    def swap_linears(self):
[1846]3057        """\
[1573]3058        Swap the linear polarisations XX and YY, or more generally the first
[1348]3059        two polarisations, as this also works for circulars.
[914]3060        """
3061        varlist = vars()
[936]3062        self._math._swap_linears(self)
[914]3063        self._add_history("swap_linears", varlist)
3064        return
3065
[1862]3066    @asaplog_post_dec
[914]3067    def invert_phase(self):
[1846]3068        """\
[914]3069        Invert the phase of the complex polarisation
3070        """
3071        varlist = vars()
[936]3072        self._math._invert_phase(self)
[914]3073        self._add_history("invert_phase", varlist)
3074        return
3075
[1862]3076    @asaplog_post_dec
[876]3077    def add(self, offset, insitu=None):
[1846]3078        """\
[513]3079        Return a scan where all spectra have the offset added
[1846]3080
[513]3081        Parameters:
[1846]3082
[513]3083            offset:      the offset
[1855]3084
[513]3085            insitu:      if False a new scantable is returned.
3086                         Otherwise, the scaling is done in-situ
3087                         The default is taken from .asaprc (False)
[1846]3088
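        Example (illustrative; assumes 'scan' is an existing scantable)::

            # add a constant offset of 0.5 to all spectra,
            # returning a new scantable rather than modifying 'scan'
            sscan = scan.add(0.5, insitu=False)
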
[513]3089        """
3090        if insitu is None: insitu = rcParams['insitu']
[876]3091        self._math._setinsitu(insitu)
[513]3092        varlist = vars()
[876]3093        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]3094        s._add_history("add", varlist)
[876]3095        if insitu:
3096            self._assign(s)
3097        else:
[513]3098            return s
3099
[1862]3100    @asaplog_post_dec
[1308]3101    def scale(self, factor, tsys=True, insitu=None):
[1846]3102        """\
3103
[1938]3104        Return a scan where all spectra are scaled by the given 'factor'
[1846]3105
[513]3106        Parameters:
[1846]3107
[1819]3108            factor:      the scaling factor (float or 1D float list)
[1855]3109
[513]3110            insitu:      if False a new scantable is returned.
3111                         Otherwise, the scaling is done in-situ
3112                         The default is taken from .asaprc (False)
[1855]3113
[513]3114            tsys:        if True (default) then apply the operation to Tsys
3115                         as well as the data
[1846]3116
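        Example (illustrative; assumes 'scan' is an existing scantable)::

            # scale all spectra (and Tsys) by a factor of 2,
            # returning a new scantable rather than modifying 'scan'
            sscan = scan.scale(2.0, tsys=True, insitu=False)
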
[513]3117        """
3118        if insitu is None: insitu = rcParams['insitu']
[876]3119        self._math._setinsitu(insitu)
[513]3120        varlist = vars()
[1819]3121        s = None
3122        import numpy
3123        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
[2320]3124            if isinstance(factor[0], list) or isinstance(factor[0],
3125                                                         numpy.ndarray):
[1819]3126                from asapmath import _array2dOp
[2320]3127                s = _array2dOp( self, factor, "MUL", tsys, insitu )
[1819]3128            else:
[2320]3129                s = scantable( self._math._arrayop( self, factor,
3130                                                    "MUL", tsys ) )
[1819]3131        else:
[2320]3132            s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]3133        s._add_history("scale", varlist)
[876]3134        if insitu:
3135            self._assign(s)
3136        else:
[513]3137            return s
3138
[1504]3139    def set_sourcetype(self, match, matchtype="pattern",
3140                       sourcetype="reference"):
[1846]3141        """\
[1502]3142        Set the type of the source to be a source or reference scan
[1846]3143        using the provided pattern.
3144
[1502]3145        Parameters:
[1846]3146
[1504]3147            match:          a Unix style pattern, regular expression or selector
[1855]3148
[1504]3149            matchtype:      'pattern' (default) UNIX style pattern or
3150                            'regex' regular expression
[1855]3151
[1502]3152            sourcetype:     the type of the source to use (source/reference)
[1846]3153
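        Example (illustrative; assumes 'scan' is an existing scantable)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
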
[1502]3154        """
3155        varlist = vars()
3156        basesel = self.get_selection()
3157        stype = -1
3158        if sourcetype.lower().startswith("r"):
3159            stype = 1
3160        elif sourcetype.lower().startswith("s"):
3161            stype = 0
[1504]3162        else:
[1502]3163            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]3164        if matchtype.lower().startswith("p"):
3165            matchtype = "pattern"
3166        elif matchtype.lower().startswith("r"):
3167            matchtype = "regex"
3168        else:
3169            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]3170        sel = selector()
3171        if isinstance(match, selector):
3172            sel = match
3173        else:
[1504]3174            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]3175        self.set_selection(basesel+sel)
3176        self._setsourcetype(stype)
3177        self.set_selection(basesel)
[1573]3178        self._add_history("set_sourcetype", varlist)
[1502]3179
[1862]3180    @asaplog_post_dec
[1857]3181    @preserve_selection
[1819]3182    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]3183        """\
[670]3184        This function allows quotients to be built automatically.
[1819]3185        It assumes the observation has the same number of
[670]3186        "ons" and "offs".
[1846]3187
[670]3188        Parameters:
[1846]3189
[710]3190            preserve:       you can preserve (default) the continuum or
3191                            remove it.  The equations used are
[1857]3192
[670]3193                            preserve: Output = Toff * (on/off) - Toff
[1857]3194
[1070]3195                            remove:   Output = Toff * (on/off) - Ton
[1855]3196
[1573]3197            mode:           the on/off detection mode
[1348]3198                            'paired' (default)
3199                            identifies 'off' scans by the
3200                            trailing '_R' (Mopra/Parkes) or
3201                            '_e'/'_w' (Tid) and matches
3202                            on/off pairs from the observing pattern
[1502]3203                            'time'
3204                            finds the closest off in time
[1348]3205
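        Example (illustrative; assumes 'scan' contains matched on/off scans)::

            # build quotients from on/off pairs, preserving the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
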
[1857]3206        .. todo:: verify argument is not implemented
3207
[670]3208        """
[1857]3209        varlist = vars()
[1348]3210        modes = ["time", "paired"]
[670]3211        if not mode in modes:
[876]3212            msg = "please provide valid mode. Valid modes are %s" % (modes)
3213            raise ValueError(msg)
[1348]3214        s = None
3215        if mode.lower() == "paired":
[1857]3216            sel = self.get_selection()
[1875]3217            sel.set_query("SRCTYPE==psoff")
[1356]3218            self.set_selection(sel)
[1348]3219            offs = self.copy()
[1875]3220            sel.set_query("SRCTYPE==pson")
[1356]3221            self.set_selection(sel)
[1348]3222            ons = self.copy()
3223            s = scantable(self._math._quotient(ons, offs, preserve))
3224        elif mode.lower() == "time":
3225            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]3226        s._add_history("auto_quotient", varlist)
[876]3227        return s
[710]3228
[1862]3229    @asaplog_post_dec
[1145]3230    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]3231        """\
[1143]3232        Form a quotient using "off" beams when observing in "MX" mode.
[1846]3233
[1143]3234        Parameters:
[1846]3235
[1145]3236            mask:           an optional mask to be used when weight == 'stddev'
[1855]3237
[1143]3238            weight:         How to average the off beams.  Default is 'median'.
[1855]3239
[1145]3240            preserve:       you can preserve (default) the continuum or
[1855]3241                            remove it.  The equations used are:
[1846]3242
[1855]3243                                preserve: Output = Toff * (on/off) - Toff
3244
3245                                remove:   Output = Toff * (on/off) - Ton
3246
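        Example (illustrative; assumes 'scan' holds an MX-mode observation)::

            # form the quotient, averaging the off beams with a median
            q = scan.mx_quotient(weight='median', preserve=True)
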
[1217]3247        """
[1593]3248        mask = mask or ()
[1141]3249        varlist = vars()
3250        on = scantable(self._math._mx_extract(self, 'on'))
[1143]3251        preoff = scantable(self._math._mx_extract(self, 'off'))
3252        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]3253        from asapmath  import quotient
[1145]3254        q = quotient(on, off, preserve)
[1143]3255        q._add_history("mx_quotient", varlist)
[1217]3256        return q
[513]3257
[1862]3258    @asaplog_post_dec
[718]3259    def freq_switch(self, insitu=None):
[1846]3260        """\
[718]3261        Apply frequency switching to the data.
[1846]3262
[718]3263        Parameters:
[1846]3264
[718]3265            insitu:      if False a new scantable is returned.
3266                         Otherwise, the switching is done in-situ
3267                         The default is taken from .asaprc (False)
[1846]3268
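        Example (illustrative; assumes 'scan' holds frequency-switched data)::

            # apply frequency switching, returning a new scantable
            fscan = scan.freq_switch(insitu=False)
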
[718]3269        """
3270        if insitu is None: insitu = rcParams['insitu']
[876]3271        self._math._setinsitu(insitu)
[718]3272        varlist = vars()
[876]3273        s = scantable(self._math._freqswitch(self))
[1118]3274        s._add_history("freq_switch", varlist)
[1856]3275        if insitu:
3276            self._assign(s)
3277        else:
3278            return s
[718]3279
[1862]3280    @asaplog_post_dec
[780]3281    def recalc_azel(self):
[1846]3282        """Recalculate the azimuth and elevation for each position."""
[780]3283        varlist = vars()
[876]3284        self._recalcazel()
[780]3285        self._add_history("recalc_azel", varlist)
3286        return
3287
[1862]3288    @asaplog_post_dec
[513]3289    def __add__(self, other):
3290        varlist = vars()
3291        s = None
3292        if isinstance(other, scantable):
[1573]3293            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]3294        elif isinstance(other, float):
[876]3295            s = scantable(self._math._unaryop(self, other, "ADD", False))
[2144]3296        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3297            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3298                from asapmath import _array2dOp
3299                s = _array2dOp( self.copy(), other, "ADD", False )
3300            else:
3301                s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
[513]3302        else:
[718]3303            raise TypeError("Other input is not a scantable or float value")
[513]3304        s._add_history("operator +", varlist)
3305        return s
3306
[1862]3307    @asaplog_post_dec
[513]3308    def __sub__(self, other):
3309        """
3310        implicit on all axes and on Tsys
3311        """
3312        varlist = vars()
3313        s = None
3314        if isinstance(other, scantable):
[1588]3315            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]3316        elif isinstance(other, float):
[876]3317            s = scantable(self._math._unaryop(self, other, "SUB", False))
[2144]3318        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3319            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3320                from asapmath import _array2dOp
3321                s = _array2dOp( self.copy(), other, "SUB", False )
3322            else:
[2277]3323                s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
[513]3324        else:
[718]3325            raise TypeError("Other input is not a scantable or float value")
[513]3326        s._add_history("operator -", varlist)
3327        return s
[710]3328
[1862]3329    @asaplog_post_dec
[513]3330    def __mul__(self, other):
3331        """
3332        implicit on all axes and on Tsys
3333        """
3334        varlist = vars()
3335        s = None
3336        if isinstance(other, scantable):
[1588]3337            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]3338        elif isinstance(other, float):
[876]3339            s = scantable(self._math._unaryop(self, other, "MUL", False))
[2144]3340        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3341            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3342                from asapmath import _array2dOp
3343                s = _array2dOp( self.copy(), other, "MUL", False )
3344            else:
[2277]3345                s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
[513]3346        else:
[718]3347            raise TypeError("Other input is not a scantable or float value")
[513]3348        s._add_history("operator *", varlist)
3349        return s
3350
[710]3351
[1862]3352    @asaplog_post_dec
[513]3353    def __div__(self, other):
3354        """
3355        implicit on all axes and on Tsys
3356        """
3357        varlist = vars()
3358        s = None
3359        if isinstance(other, scantable):
[1589]3360            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]3361        elif isinstance(other, float):
3362            if other == 0.0:
[718]3363                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]3364            s = scantable(self._math._unaryop(self, other, "DIV", False))
[2144]3365        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
[2277]3366            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
[2144]3367                from asapmath import _array2dOp
3368                s = _array2dOp( self.copy(), other, "DIV", False )
3369            else:
[2277]3370                s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
[513]3371        else:
[718]3372            raise TypeError("Other input is not a scantable or float value")
[513]3373        s._add_history("operator /", varlist)
3374        return s
3375
[1862]3376    @asaplog_post_dec
[530]3377    def get_fit(self, row=0):
[1846]3378        """\
[530]3379        Print or return the stored fits for a row in the scantable
[1846]3380
[530]3381        Parameters:
[1846]3382
[530]3383            row:    the row which the fit has been applied to.
[1846]3384
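        Example (illustrative; assumes a fit has already been stored for row 0)::

            fitpars = scan.get_fit(row=0)
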
[530]3385        """
3386        if row >= self.nrow():
3387            return
[976]3388        from asap.asapfit import asapfit
[530]3389        fit = asapfit(self._getfit(row))
[1859]3390        asaplog.push( '%s' %(fit) )
3391        return fit.as_dict()
[530]3392
[1483]3393    def flag_nans(self):
[1846]3394        """\
[1483]3395        Utility function to flag NaN values in the scantable.
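
        Example (illustrative; assumes 'scan' is an existing scantable)::

            # flag any NaN channels before further processing
            scan.flag_nans()
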
3396        """
3397        import numpy
3398        basesel = self.get_selection()
3399        for i in range(self.nrow()):
[1589]3400            sel = self.get_row_selector(i)
3401            self.set_selection(basesel+sel)
[1483]3402            nans = numpy.isnan(self._getspectrum(0))
3403            if numpy.any(nans):
3404                bnans = [ bool(v) for v in nans]
3405                self.flag(bnans)
3406        self.set_selection(basesel)
3407
[1588]3408    def get_row_selector(self, rowno):
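        """\
        Return a selector that selects only the given row number
        (used e.g. by flag_nans to step through the table row by row).
        """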
[1992]3409        #return selector(beams=self.getbeam(rowno),
3410        #                ifs=self.getif(rowno),
3411        #                pols=self.getpol(rowno),
3412        #                scans=self.getscan(rowno),
3413        #                cycles=self.getcycle(rowno))
3414        return selector(rows=[rowno])
[1573]3415
[484]3416    def _add_history(self, funcname, parameters):
[1435]3417        if not rcParams['scantable.history']:
3418            return
[484]3419        # create date
3420        sep = "##"
3421        from datetime import datetime
3422        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3423        hist = dstr+sep
3424        hist += funcname+sep#cdate+sep
3425        if parameters.has_key('self'): del parameters['self']
[1118]3426        for k, v in parameters.iteritems():
[484]3427            if type(v) is dict:
[1118]3428                for k2, v2 in v.iteritems():
[484]3429                    hist += k2
3430                    hist += "="
[1118]3431                    if isinstance(v2, scantable):
[484]3432                        hist += 'scantable'
3433                    elif k2 == 'mask':
[1118]3434                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]3435                            hist += str(self._zip_mask(v2))
3436                        else:
3437                            hist += str(v2)
[484]3438                    else:
[513]3439                        hist += str(v2)
[484]3440            else:
3441                hist += k
3442                hist += "="
[1118]3443                if isinstance(v, scantable):
[484]3444                    hist += 'scantable'
3445                elif k == 'mask':
[1118]3446                    if isinstance(v, list) or isinstance(v, tuple):
[513]3447                        hist += str(self._zip_mask(v))
3448                    else:
3449                        hist += str(v)
[484]3450                else:
3451                    hist += str(v)
3452            hist += sep
3453        hist = hist[:-2] # remove trailing '##'
3454        self._addhistory(hist)
3455
[710]3456
[484]3457    def _zip_mask(self, mask):
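        """\
        Compress a channel mask (a list of booleans/0-1 values) into a
        list of [start, end) index pairs, one per contiguous run of True
        values, e.g. [False, True, True, False] -> [[1, 3]].
        Used by _add_history() to store mask arguments compactly.
        """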
3458        mask = list(mask)
3459        i = 0
3460        segments = []
3461        while mask[i:].count(1):
3462            i += mask[i:].index(1)
3463            if mask[i:].count(0):
3464                j = i + mask[i:].index(0)
3465            else:
[710]3466                j = len(mask)
[1118]3467            segments.append([i, j])
[710]3468            i = j
[484]3469        return segments
[714]3470
[626]3471    def _get_ordinate_label(self):
3472        fu = "("+self.get_fluxunit()+")"
3473        import re
3474        lbl = "Intensity"
[1118]3475        if re.match(".K.", fu):
[626]3476            lbl = "Brightness Temperature "+ fu
[1118]3477        elif re.match(".Jy.", fu):
[626]3478            lbl = "Flux density "+ fu
3479        return lbl
[710]3480
[876]3481    def _check_ifs(self):
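        """\
        Heuristic check that all IFs have the same number of channels:
        compares the (integer) mean of the per-IF channel counts
        (ignoring IFs with zero channels) with the first count and
        returns True if they agree.
        """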
[1986]3482        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3483        nchans = [self.nchan(i) for i in self.getifnos()]
[2004]3484        nchans = filter(lambda t: t > 0, nchans)
[876]3485        return (sum(nchans)/len(nchans) == nchans[0])
[976]3486
[1862]3487    @asaplog_post_dec
[1916]3488    #def _fill(self, names, unit, average, getpt, antenna):
3489    def _fill(self, names, unit, average, opts={}):
[976]3490        first = True
3491        fullnames = []
3492        for name in names:
3493            name = os.path.expandvars(name)
3494            name = os.path.expanduser(name)
3495            if not os.path.exists(name):
3496                msg = "File '%s' does not exist" % (name)
3497                raise IOError(msg)
3498            fullnames.append(name)
3499        if average:
3500            asaplog.push('Auto averaging integrations')
[1079]3501        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]3502        for name in fullnames:
[1073]3503            tbl = Scantable(stype)
[2004]3504            if is_ms( name ):
3505                r = msfiller( tbl )
3506            else:
3507                r = filler( tbl )
3508                rx = rcParams['scantable.reference']
3509                r.setreferenceexpr(rx)
3510            #r = filler(tbl)
3511            #rx = rcParams['scantable.reference']
3512            #r.setreferenceexpr(rx)
[976]3513            msg = "Importing %s..." % (name)
[1118]3514            asaplog.push(msg, False)
[1916]3515            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]3516            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]3517            r.fill()
[976]3518            if average:
[1118]3519                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]3520            if not first:
3521                tbl = self._math._merge([self, tbl])
3522            Scantable.__init__(self, tbl)
[1843]3523            r.close()
[1118]3524            del r, tbl
[976]3525            first = False
[1861]3526            #flush log
3527        asaplog.post()
[976]3528        if unit is not None:
3529            self.set_fluxunit(unit)
[1824]3530        if not is_casapy():
3531            self.set_freqframe(rcParams['scantable.freqframe'])
[976]3532
[2012]3533
[1402]3534    def __getitem__(self, key):
3535        if key < 0:
3536            key += self.nrow()
3537        if key >= self.nrow():
3538            raise IndexError("Row index out of range.")
3539        return self._getspectrum(key)
3540
3541    def __setitem__(self, key, value):
3542        if key < 0:
3543            key += self.nrow()
3544        if key >= self.nrow():
3545            raise IndexError("Row index out of range.")
3546        if not hasattr(value, "__len__") or \
3547                len(value) > self.nchan(self.getif(key)):
3548            raise ValueError("Spectrum length doesn't match.")
3549        return self._setspectrum(value, key)
3550
3551    def __len__(self):
3552        return self.nrow()
3553
3554    def __iter__(self):
3555        for i in range(len(self)):
3556            yield self[i]