source: trunk/python/scantable.py @ 1902

Last change on this file since 1902 was 1893, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No?

Module(s): Module Names change impacts.

Description: Describe your changes here...

Bug fix on scantable constructor.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 86.7 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1691]4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
[1824]9from asap.env import is_casapy
[876]10from asap._asap import Scantable
[1843]11from asap._asap import filler
[1824]12from asap.parameters import rcParams
[1862]13from asap.logging import asaplog, asaplog_post_dec
[1824]14from asap.selector import selector
15from asap.linecatalog import linecatalog
[1600]16from asap.coordinate import coordinate
[1859]17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[102]18
[1689]19
def preserve_selection(func):
    """Decorator that restores the scantable's selection after *func* runs.

    The wrapped callable's first argument must expose get_selection() and
    set_selection(); the selection active before the call is re-applied
    afterwards, even if the call raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # restore the original selection no matter what happened
            obj.set_selection(saved)
        return result
    return wrapper
30
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True for any casacore table directory that is not explicitly
    a Measurement Set (matches the historical behaviour of this check).
    """
    # A scantable is a casacore table directory; identify the table kind
    # from the type string on the first line of 'table.info'.
    if not (os.path.isdir(filename)
            and os.path.exists(os.path.join(filename, 'table.info'))
            and os.path.exists(os.path.join(filename, 'table.dat'))):
        return False
    f = open(os.path.join(filename, 'table.info'))
    try:
        first_line = f.readline()
    finally:
        # close the handle even if readline() raises
        f.close()
    return first_line.find('Measurement Set') == -1
[1697]55
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True only when 'table.info' explicitly declares the casacore
    table to be a Measurement Set.
    """
    if not (os.path.isdir(filename)
            and os.path.exists(os.path.join(filename, 'table.info'))
            and os.path.exists(os.path.join(filename, 'table.dat'))):
        return False
    f = open(os.path.join(filename, 'table.info'))
    try:
        first_line = f.readline()
    finally:
        # close the handle even if readline() raises
        f.close()
    return first_line.find('Measurement Set') != -1
76   
class scantable(Scantable):
    """\
        The ASAP container for scans (single-dish data).

        A scantable is created from an ASAP table on disk, from an
        rpfits/sdfits/MS file, or as a reference to an existing scantable
        (see the constructor for details).
    """
[1819]81
[1862]82    @asaplog_post_dec
[1824]83    def __init__(self, filename, average=None, unit=None, getpt=None,
84                 antenna=None, parallactify=None):
[1846]85        """\
[102]86        Create a scantable from a saved one or make a reference
[1846]87
[102]88        Parameters:
[1846]89
90            filename:     the name of an asap table on disk
91                          or
92                          the name of a rpfits/sdfits/ms file
93                          (integrations within scans are auto averaged
94                          and the whole file is read) or
95                          [advanced] a reference to an existing scantable
96
97            average:      average all integrations withinb a scan on read.
98                          The default (True) is taken from .asaprc.
99
[484]100            unit:         brightness unit; must be consistent with K or Jy.
[1846]101                          Over-rides the default selected by the filler
102                          (input rpfits/sdfits/ms) or replaces the value
103                          in existing scantables
104
105            getpt:        for MeasurementSet input data only:
106                          If True, all pointing data are filled.
107                          The deafult is False, which makes time to load
108                          the MS data faster in some cases.
109
110            antenna:      Antenna selection. integer (id) or string (name or id).
111
112            parallactify: Indicate that the data had been parallatified. Default
113                          is taken from rc file.
114
[710]115        """
[976]116        if average is None:
[710]117            average = rcParams['scantable.autoaverage']
[1819]118        if getpt is None:
119            getpt = True
[1843]120        if antenna is not None:
121            asaplog.push("Antenna selection currently unsupported."
[1883]122                         "Using ''")
[1861]123            asaplog.post('WARN')
[1819]124        if antenna is None:
125            antenna = ''
126        elif type(antenna) == int:
[1843]127            antenna = '%s' % antenna
[1819]128        elif type(antenna) == list:
129            tmpstr = ''
130            for i in range( len(antenna) ):
131                if type(antenna[i]) == int:
132                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
133                elif type(antenna[i]) == str:
134                    tmpstr=tmpstr+antenna[i]+','
135                else:
[1859]136                    raise TypeError('Bad antenna selection.')
[1819]137            antenna = tmpstr.rstrip(',')
[1593]138        parallactify = parallactify or rcParams['scantable.parallactify']
[1259]139        varlist = vars()
[876]140        from asap._asap import stmath
[1819]141        self._math = stmath( rcParams['insitu'] )
[876]142        if isinstance(filename, Scantable):
143            Scantable.__init__(self, filename)
[181]144        else:
[1697]145            if isinstance(filename, str):
[976]146                filename = os.path.expandvars(filename)
147                filename = os.path.expanduser(filename)
148                if not os.path.exists(filename):
149                    s = "File '%s' not found." % (filename)
150                    raise IOError(s)
[1697]151                if is_scantable(filename):
152                    ondisk = rcParams['scantable.storage'] == 'disk'
153                    Scantable.__init__(self, filename, ondisk)
154                    if unit is not None:
155                        self.set_fluxunit(unit)
[1819]156                    # do not reset to the default freqframe
157                    #self.set_freqframe(rcParams['scantable.freqframe'])
[1883]158                #elif os.path.isdir(filename) \
159                #         and not os.path.exists(filename+'/table.f1'):
160                elif is_ms(filename):
161                    self._fill([filename], unit, average, getpt, antenna)
[1893]162                elif os.path.isfile(filename):
163                    self._fill([filename], unit, average, getpt, antenna)
[1883]164                else:
[1819]165                    msg = "The given file '%s'is not a valid " \
166                          "asap table." % (filename)
[1859]167                    raise IOError(msg)
[1118]168            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
[976]169                  and isinstance(filename[-1], str):
[1819]170                self._fill(filename, unit, average, getpt, antenna)
[1586]171        self.parallactify(parallactify)
[1259]172        self._add_history("scantable", varlist)
[102]173
[1862]174    @asaplog_post_dec
[876]175    def save(self, name=None, format=None, overwrite=False):
[1846]176        """\
[1280]177        Store the scantable on disk. This can be an asap (aips++) Table,
178        SDFITS or MS2 format.
[1846]179
[116]180        Parameters:
[1846]181
[1093]182            name:        the name of the outputfile. For format "ASCII"
183                         this is the root file name (data in 'name'.txt
[497]184                         and header in 'name'_header.txt)
[1855]185
[116]186            format:      an optional file format. Default is ASAP.
[1855]187                         Allowed are:
188
189                            * 'ASAP' (save as ASAP [aips++] Table),
190                            * 'SDFITS' (save as SDFITS file)
191                            * 'ASCII' (saves as ascii text file)
192                            * 'MS2' (saves as an casacore MeasurementSet V2)
193                            * 'FITS' (save as image FITS - not readable by class)
194                            * 'CLASS' (save as FITS readable by CLASS)
195
[411]196            overwrite:   If the file should be overwritten if it exists.
[256]197                         The default False is to return with warning
[411]198                         without writing the output. USE WITH CARE.
[1855]199
[1846]200        Example::
201
[116]202            scan.save('myscan.asap')
[1118]203            scan.save('myscan.sdfits', 'SDFITS')
[1846]204
[116]205        """
[411]206        from os import path
[1593]207        format = format or rcParams['scantable.save']
[256]208        suffix = '.'+format.lower()
[1118]209        if name is None or name == "":
[256]210            name = 'scantable'+suffix
[718]211            msg = "No filename given. Using default name %s..." % name
212            asaplog.push(msg)
[411]213        name = path.expandvars(name)
[256]214        if path.isfile(name) or path.isdir(name):
215            if not overwrite:
[718]216                msg = "File %s exists." % name
[1859]217                raise IOError(msg)
[451]218        format2 = format.upper()
219        if format2 == 'ASAP':
[116]220            self._save(name)
221        else:
[989]222            from asap._asap import stwriter as stw
[1118]223            writer = stw(format2)
224            writer.write(self, name)
[116]225        return
226
[102]227    def copy(self):
[1846]228        """Return a copy of this scantable.
229
230        *Note*:
231
[1348]232            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]233
234        Example::
235
[102]236            copiedscan = scan.copy()
[1846]237
[102]238        """
[876]239        sd = scantable(Scantable._copy(self))
[113]240        return sd
241
[1093]242    def drop_scan(self, scanid=None):
[1846]243        """\
[1093]244        Return a new scantable where the specified scan number(s) has(have)
245        been dropped.
[1846]246
[1093]247        Parameters:
[1846]248
[1093]249            scanid:    a (list of) scan number(s)
[1846]250
[1093]251        """
252        from asap import _is_sequence_or_number as _is_valid
253        from asap import _to_list
254        from asap import unique
255        if not _is_valid(scanid):
[1859]256            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
257        scanid = _to_list(scanid)
258        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
259        for sid in scanid: allscans.remove(sid)
260        if len(allscans) == 0:
261            raise ValueError("Can't remove all scans")
262        sel = selector(scans=allscans)
263        return self._select_copy(sel)
[1093]264
[1594]265    def _select_copy(self, selection):
266        orig = self.get_selection()
267        self.set_selection(orig+selection)
268        cp = self.copy()
269        self.set_selection(orig)
270        return cp
271
[102]272    def get_scan(self, scanid=None):
[1855]273        """\
[102]274        Return a specific scan (by scanno) or collection of scans (by
275        source name) in a new scantable.
[1846]276
277        *Note*:
278
[1348]279            See scantable.drop_scan() for the inverse operation.
[1846]280
[102]281        Parameters:
[1846]282
[513]283            scanid:    a (list of) scanno or a source name, unix-style
284                       patterns are accepted for source name matching, e.g.
285                       '*_R' gets all 'ref scans
[1846]286
287        Example::
288
[513]289            # get all scans containing the source '323p459'
290            newscan = scan.get_scan('323p459')
291            # get all 'off' scans
292            refscans = scan.get_scan('*_R')
293            # get a susbset of scans by scanno (as listed in scan.summary())
[1118]294            newscan = scan.get_scan([0, 2, 7, 10])
[1846]295
[102]296        """
297        if scanid is None:
[1859]298            raise RuntimeError( 'Please specify a scan no or name to '
299                                'retrieve from the scantable' )
[102]300        try:
[946]301            bsel = self.get_selection()
302            sel = selector()
[102]303            if type(scanid) is str:
[946]304                sel.set_name(scanid)
[1594]305                return self._select_copy(sel)
[102]306            elif type(scanid) is int:
[946]307                sel.set_scans([scanid])
[1594]308                return self._select_copy(sel)
[381]309            elif type(scanid) is list:
[946]310                sel.set_scans(scanid)
[1594]311                return self._select_copy(sel)
[381]312            else:
[718]313                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]314                raise TypeError(msg)
[102]315        except RuntimeError:
[1859]316            raise
[102]317
    def __str__(self):
        # Verbose summary string produced by the underlying C++ layer.
        return Scantable._summary(self, True)
[102]320
[976]321    def summary(self, filename=None):
[1846]322        """\
[102]323        Print a summary of the contents of this scantable.
[1846]324
[102]325        Parameters:
[1846]326
[102]327            filename:    the name of a file to write the putput to
328                         Default - no file output
[1846]329
[102]330        """
[976]331        info = Scantable._summary(self, True)
[102]332        if filename is not None:
[256]333            if filename is "":
334                filename = 'scantable_summary.txt'
[415]335            from os.path import expandvars, isdir
[411]336            filename = expandvars(filename)
[415]337            if not isdir(filename):
[413]338                data = open(filename, 'w')
339                data.write(info)
340                data.close()
341            else:
[718]342                msg = "Illegal file name '%s'." % (filename)
[1859]343                raise IOError(msg)
344        return page(info)
[710]345
[1512]346    def get_spectrum(self, rowno):
[1471]347        """Return the spectrum for the current row in the scantable as a list.
[1846]348
[1471]349        Parameters:
[1846]350
[1573]351             rowno:   the row number to retrieve the spectrum from
[1846]352
[1471]353        """
354        return self._getspectrum(rowno)
[946]355
[1471]356    def get_mask(self, rowno):
357        """Return the mask for the current row in the scantable as a list.
[1846]358
[1471]359        Parameters:
[1846]360
[1573]361             rowno:   the row number to retrieve the mask from
[1846]362
[1471]363        """
364        return self._getmask(rowno)
365
366    def set_spectrum(self, spec, rowno):
367        """Return the spectrum for the current row in the scantable as a list.
[1846]368
[1471]369        Parameters:
[1846]370
[1855]371             spec:   the new spectrum
[1846]372
[1855]373             rowno:  the row number to set the spectrum for
374
[1471]375        """
376        assert(len(spec) == self.nchan())
377        return self._setspectrum(spec, rowno)
378
[1600]379    def get_coordinate(self, rowno):
380        """Return the (spectral) coordinate for a a given 'rowno'.
[1846]381
382        *Note*:
383
[1600]384            * This coordinate is only valid until a scantable method modifies
385              the frequency axis.
386            * This coordinate does contain the original frequency set-up
387              NOT the new frame. The conversions however are done using the user
388              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
389              use scantable.freq_align first. Without it there is no closure,
[1846]390              i.e.::
[1600]391
[1846]392                  c = myscan.get_coordinate(0)
393                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
394
[1600]395        Parameters:
[1846]396
[1600]397             rowno:    the row number for the spectral coordinate
398
399        """
400        return coordinate(Scantable.get_coordinate(self, rowno))
401
[946]402    def get_selection(self):
[1846]403        """\
[1005]404        Get the selection object currently set on this scantable.
[1846]405
406        Example::
407
[1005]408            sel = scan.get_selection()
409            sel.set_ifs(0)              # select IF 0
410            scan.set_selection(sel)     # apply modified selection
[1846]411
[946]412        """
413        return selector(self._getselection())
414
[1576]415    def set_selection(self, selection=None, **kw):
[1846]416        """\
[1005]417        Select a subset of the data. All following operations on this scantable
418        are only applied to thi selection.
[1846]419
[1005]420        Parameters:
[1697]421
[1846]422            selection:    a selector object (default unset the selection), or
423                          any combination of "pols", "ifs", "beams", "scans",
424                          "cycles", "name", "query"
[1697]425
[1846]426        Examples::
[1697]427
[1005]428            sel = selector()         # create a selection object
[1118]429            self.set_scans([0, 3])    # select SCANNO 0 and 3
[1005]430            scan.set_selection(sel)  # set the selection
431            scan.summary()           # will only print summary of scanno 0 an 3
432            scan.set_selection()     # unset the selection
[1697]433            # or the equivalent
434            scan.set_selection(scans=[0,3])
435            scan.summary()           # will only print summary of scanno 0 an 3
436            scan.set_selection()     # unset the selection
[1846]437
[946]438        """
[1576]439        if selection is None:
440            # reset
441            if len(kw) == 0:
442                selection = selector()
443            else:
444                # try keywords
445                for k in kw:
446                    if k not in selector.fields:
447                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
448                selection = selector(**kw)
[946]449        self._setselection(selection)
450
[1819]451    def get_row(self, row=0, insitu=None):
[1846]452        """\
[1819]453        Select a row in the scantable.
454        Return a scantable with single row.
[1846]455
[1819]456        Parameters:
[1846]457
458            row:    row no of integration, default is 0.
459            insitu: if False a new scantable is returned. Otherwise, the
460                    scaling is done in-situ. The default is taken from .asaprc
461                    (False)
462
[1819]463        """
464        if insitu is None: insitu = rcParams['insitu']
465        if not insitu:
466            workscan = self.copy()
467        else:
468            workscan = self
469        # Select a row
470        sel=selector()
471        sel.set_scans([workscan.getscan(row)])
472        sel.set_cycles([workscan.getcycle(row)])
473        sel.set_beams([workscan.getbeam(row)])
474        sel.set_ifs([workscan.getif(row)])
475        sel.set_polarisations([workscan.getpol(row)])
476        sel.set_name(workscan._getsourcename(row))
477        workscan.set_selection(sel)
478        if not workscan.nrow() == 1:
479            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
480            raise RuntimeError(msg)
481        del sel
482        if insitu:
483            self._assign(workscan)
484        else:
485            return workscan
486
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f'):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abcissa value (channel/freq/velocity)
        # at the extremum instead of the statistic itself.
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # for min/max also look up the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc: statvals = self._math._stats(self, mask, stat)

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50
        # build a formatted per-row report while (for the _abc case)
        # collecting the values to return
        for i in range(self.nrow()):
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            #out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
            out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
            out +=  sep+"\n"


        # Echo the report through a per-user temp file so asaplog captures
        # the formatted output.
        # NOTE(review): the /tmp filename is predictable (not mkstemp), and
        # 'statunit' is only bound inside the loop above -- an empty table
        # would raise NameError here; confirm callers guarantee nrow() > 0.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
580
581    def chan2data(self, rowno=0, chan=0):
[1846]582        """\
[1819]583        Returns channel/frequency/velocity and spectral value
584        at an arbitrary row and channel in the scantable.
[1846]585
[1819]586        Parameters:
[1846]587
[1819]588            rowno:   a row number in the scantable. Default is the
589                     first row, i.e. rowno=0
[1855]590
[1819]591            chan:    a channel in the scantable. Default is the first
592                     channel, i.e. pos=0
[1846]593
[1819]594        """
595        if isinstance(rowno, int) and isinstance(chan, int):
596            qx = {'unit': self.get_unit(),
597                  'value': self._getabcissa(rowno)[chan]}
598            qy = {'unit': self.get_fluxunit(),
599                  'value': self._getspectrum(rowno)[chan]}
600            return qx, qy
601
[1118]602    def stddev(self, mask=None):
[1846]603        """\
[135]604        Determine the standard deviation of the current beam/if/pol
605        Takes a 'mask' as an optional parameter to specify which
606        channels should be excluded.
[1846]607
[135]608        Parameters:
[1846]609
[135]610            mask:    an optional mask specifying where the standard
611                     deviation should be determined.
612
[1846]613        Example::
614
[135]615            scan.set_unit('channel')
[1118]616            msk = scan.create_mask([100, 200], [500, 600])
[135]617            scan.stddev(mask=m)
[1846]618
[135]619        """
[1118]620        return self.stats(stat='stddev', mask=mask);
[135]621
[1003]622
[1259]623    def get_column_names(self):
[1846]624        """\
[1003]625        Return a  list of column names, which can be used for selection.
626        """
[1259]627        return list(Scantable.get_column_names(self))
[1003]628
[1730]629    def get_tsys(self, row=-1):
[1846]630        """\
[113]631        Return the System temperatures.
[1846]632
633        Parameters:
634
635            row:    the rowno to get the information for. (default all rows)
636
[113]637        Returns:
[1846]638
[876]639            a list of Tsys values for the current selection
[1846]640
[113]641        """
[1730]642        if row > -1:
643            return self._get_column(self._gettsys, row)
[876]644        return self._row_callback(self._gettsys, "Tsys")
[256]645
[1730]646
647    def get_weather(self, row=-1):
[1846]648        """\
649        Return the weather informations.
650
651        Parameters:
652
653            row:    the rowno to get the information for. (default all rows)
654
655        Returns:
656
657            a dict or list of of dicts of values for the current selection
658
659        """
660
[1730]661        values = self._get_column(self._get_weather, row)
662        if row > -1:
663            return {'temperature': values[0],
664                    'pressure': values[1], 'humidity' : values[2],
665                    'windspeed' : values[3], 'windaz' : values[4]
666                    }
667        else:
668            out = []
669            for r in values:
670
671                out.append({'temperature': r[0],
672                            'pressure': r[1], 'humidity' : r[2],
673                            'windspeed' : r[3], 'windaz' : r[4]
674                    })
675            return out
676
[876]677    def _row_callback(self, callback, label):
678        out = ""
[1118]679        outvec = []
[1590]680        sep = '-'*50
[876]681        for i in range(self.nrow()):
682            tm = self._gettime(i)
683            src = self._getsourcename(i)
[1590]684            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
[876]685            out += 'Time[%s]:\n' % (tm)
[1590]686            if self.nbeam(-1) > 1:
687                out +=  ' Beam[%d] ' % (self.getbeam(i))
688            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
689            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
[876]690            outvec.append(callback(i))
691            out += '= %3.3f\n' % (outvec[i])
[1590]692            out +=  sep+'\n'
[1859]693
694        asaplog.push(sep)
695        asaplog.push(" %s" % (label))
696        asaplog.push(sep)
697        asaplog.push(out)
[1861]698        asaplog.post()
[1175]699        return outvec
[256]700
[1070]701    def _get_column(self, callback, row=-1):
702        """
703        """
704        if row == -1:
705            return [callback(i) for i in range(self.nrow())]
706        else:
[1819]707            if  0 <= row < self.nrow():
[1070]708                return callback(row)
[256]709
[1070]710
[1348]711    def get_time(self, row=-1, asdatetime=False):
[1846]712        """\
[113]713        Get a list of time stamps for the observations.
[1348]714        Return a datetime object for each integration time stamp in the scantable.
[1846]715
[113]716        Parameters:
[1846]717
[1348]718            row:          row no of integration. Default -1 return all rows
[1855]719
[1348]720            asdatetime:   return values as datetime objects rather than strings
[1846]721
[113]722        """
[1175]723        from time import strptime
724        from datetime import datetime
[1392]725        times = self._get_column(self._gettime, row)
[1348]726        if not asdatetime:
[1392]727            return times
[1175]728        format = "%Y/%m/%d/%H:%M:%S"
729        if isinstance(times, list):
730            return [datetime(*strptime(i, format)[:6]) for i in times]
731        else:
732            return datetime(*strptime(times, format)[:6])
[102]733
[1348]734
735    def get_inttime(self, row=-1):
[1846]736        """\
[1348]737        Get a list of integration times for the observations.
738        Return a time in seconds for each integration in the scantable.
[1846]739
[1348]740        Parameters:
[1846]741
[1348]742            row:    row no of integration. Default -1 return all rows.
[1846]743
[1348]744        """
[1573]745        return self._get_column(self._getinttime, row)
[1348]746
[1573]747
[714]748    def get_sourcename(self, row=-1):
[1846]749        """\
[794]750        Get a list source names for the observations.
[714]751        Return a string for each integration in the scantable.
752        Parameters:
[1846]753
[1348]754            row:    row no of integration. Default -1 return all rows.
[1846]755
[714]756        """
[1070]757        return self._get_column(self._getsourcename, row)
[714]758
[794]759    def get_elevation(self, row=-1):
[1846]760        """\
[794]761        Get a list of elevations for the observations.
762        Return a float for each integration in the scantable.
[1846]763
[794]764        Parameters:
[1846]765
[1348]766            row:    row no of integration. Default -1 return all rows.
[1846]767
[794]768        """
[1070]769        return self._get_column(self._getelevation, row)
[794]770
771    def get_azimuth(self, row=-1):
[1846]772        """\
[794]773        Get a list of azimuths for the observations.
774        Return a float for each integration in the scantable.
[1846]775
[794]776        Parameters:
[1348]777            row:    row no of integration. Default -1 return all rows.
[1846]778
[794]779        """
[1070]780        return self._get_column(self._getazimuth, row)
[794]781
782    def get_parangle(self, row=-1):
[1846]783        """\
[794]784        Get a list of parallactic angles for the observations.
785        Return a float for each integration in the scantable.
[1846]786
[794]787        Parameters:
[1846]788
[1348]789            row:    row no of integration. Default -1 return all rows.
[1846]790
[794]791        """
[1070]792        return self._get_column(self._getparangle, row)
[794]793
[1070]794    def get_direction(self, row=-1):
795        """
796        Get a list of Positions on the sky (direction) for the observations.
[1594]797        Return a string for each integration in the scantable.
[1855]798
[1070]799        Parameters:
[1855]800
[1070]801            row:    row no of integration. Default -1 return all rows
[1855]802
[1070]803        """
804        return self._get_column(self._getdirection, row)
805
[1391]806    def get_directionval(self, row=-1):
[1846]807        """\
[1391]808        Get a list of Positions on the sky (direction) for the observations.
809        Return a float for each integration in the scantable.
[1846]810
[1391]811        Parameters:
[1846]812
[1391]813            row:    row no of integration. Default -1 return all rows
[1846]814
[1391]815        """
816        return self._get_column(self._getdirectionvec, row)
817
[1862]818    @asaplog_post_dec
[102]819    def set_unit(self, unit='channel'):
[1846]820        """\
[102]821        Set the unit for all following operations on this scantable
[1846]822
[102]823        Parameters:
[1846]824
825            unit:    optional unit, default is 'channel'. Use one of '*Hz',
826                     'km/s', 'channel' or equivalent ''
827
[102]828        """
[484]829        varlist = vars()
[1118]830        if unit in ['', 'pixel', 'channel']:
[113]831            unit = ''
832        inf = list(self._getcoordinfo())
833        inf[0] = unit
834        self._setcoordinfo(inf)
[1118]835        self._add_history("set_unit", varlist)
[113]836
[1862]837    @asaplog_post_dec
[484]838    def set_instrument(self, instr):
[1846]839        """\
[1348]840        Set the instrument for subsequent processing.
[1846]841
[358]842        Parameters:
[1846]843
[710]844            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]845                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]846
[358]847        """
848        self._setInstrument(instr)
[1118]849        self._add_history("set_instument", vars())
[358]850
[1862]851    @asaplog_post_dec
[1190]852    def set_feedtype(self, feedtype):
[1846]853        """\
[1190]854        Overwrite the feed type, which might not be set correctly.
[1846]855
[1190]856        Parameters:
[1846]857
[1190]858            feedtype:     'linear' or 'circular'
[1846]859
[1190]860        """
861        self._setfeedtype(feedtype)
862        self._add_history("set_feedtype", vars())
863
[1862]864    @asaplog_post_dec
[276]865    def set_doppler(self, doppler='RADIO'):
[1846]866        """\
[276]867        Set the doppler for all following operations on this scantable.
[1846]868
[276]869        Parameters:
[1846]870
[276]871            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]872
[276]873        """
[484]874        varlist = vars()
[276]875        inf = list(self._getcoordinfo())
876        inf[2] = doppler
877        self._setcoordinfo(inf)
[1118]878        self._add_history("set_doppler", vars())
[710]879
[1862]880    @asaplog_post_dec
[226]881    def set_freqframe(self, frame=None):
[1846]882        """\
[113]883        Set the frame type of the Spectral Axis.
[1846]884
[113]885        Parameters:
[1846]886
[591]887            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]888                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]889                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]890
891        Example::
892
[113]893            scan.set_freqframe('BARY')
[1846]894
[113]895        """
[1593]896        frame = frame or rcParams['scantable.freqframe']
[484]897        varlist = vars()
[1819]898        # "REST" is not implemented in casacore
899        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
900        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
901        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]902                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]903
[989]904        if frame in valid:
[113]905            inf = list(self._getcoordinfo())
906            inf[1] = frame
907            self._setcoordinfo(inf)
[1118]908            self._add_history("set_freqframe", varlist)
[102]909        else:
[1118]910            msg  = "Please specify a valid freq type. Valid types are:\n", valid
[1859]911            raise TypeError(msg)
[710]912
[1862]913    @asaplog_post_dec
[989]914    def set_dirframe(self, frame=""):
[1846]915        """\
[989]916        Set the frame type of the Direction on the sky.
[1846]917
[989]918        Parameters:
[1846]919
[989]920            frame:   an optional frame type, default ''. Valid frames are:
921                     'J2000', 'B1950', 'GALACTIC'
[1846]922
923        Example:
924
[989]925            scan.set_dirframe('GALACTIC')
[1846]926
[989]927        """
928        varlist = vars()
[1859]929        Scantable.set_dirframe(self, frame)
[1118]930        self._add_history("set_dirframe", varlist)
[989]931
[113]932    def get_unit(self):
[1846]933        """\
[113]934        Get the default unit set in this scantable
[1846]935
[113]936        Returns:
[1846]937
[113]938            A unit string
[1846]939
[113]940        """
941        inf = self._getcoordinfo()
942        unit = inf[0]
943        if unit == '': unit = 'channel'
944        return unit
[102]945
[1862]946    @asaplog_post_dec
[158]947    def get_abcissa(self, rowno=0):
[1846]948        """\
[158]949        Get the abcissa in the current coordinate setup for the currently
[113]950        selected Beam/IF/Pol
[1846]951
[113]952        Parameters:
[1846]953
[226]954            rowno:    an optional row number in the scantable. Default is the
955                      first row, i.e. rowno=0
[1846]956
[113]957        Returns:
[1846]958
[1348]959            The abcissa values and the format string (as a dictionary)
[1846]960
[113]961        """
[256]962        abc = self._getabcissa(rowno)
[710]963        lbl = self._getabcissalabel(rowno)
[158]964        return abc, lbl
[113]965
[1862]966    @asaplog_post_dec
[1819]967    def flag(self, mask=None, unflag=False):
[1846]968        """\
[1001]969        Flag the selected data using an optional channel mask.
[1846]970
[1001]971        Parameters:
[1846]972
[1001]973            mask:   an optional channel mask, created with create_mask. Default
974                    (no mask) is all channels.
[1855]975
[1819]976            unflag:    if True, unflag the data
[1846]977
[1001]978        """
979        varlist = vars()
[1593]980        mask = mask or []
[1859]981        self._flag(mask, unflag)
[1001]982        self._add_history("flag", varlist)
983
[1862]984    @asaplog_post_dec
[1819]985    def flag_row(self, rows=[], unflag=False):
[1846]986        """\
[1819]987        Flag the selected data in row-based manner.
[1846]988
[1819]989        Parameters:
[1846]990
[1843]991            rows:   list of row numbers to be flagged. Default is no row
992                    (must be explicitly specified to execute row-based flagging).
[1855]993
[1819]994            unflag: if True, unflag the data.
[1846]995
[1819]996        """
997        varlist = vars()
[1859]998        self._flag_row(rows, unflag)
[1819]999        self._add_history("flag_row", varlist)
1000
[1862]1001    @asaplog_post_dec
[1819]1002    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1003        """\
[1819]1004        Flag the selected data outside a specified range (in channel-base)
[1846]1005
[1819]1006        Parameters:
[1846]1007
[1819]1008            uthres:      upper threshold.
[1855]1009
[1819]1010            dthres:      lower threshold
[1846]1011
[1819]1012            clipoutside: True for flagging data outside the range [dthres:uthres].
1013                         False for glagging data inside the range.
[1855]1014
[1846]1015            unflag:      if True, unflag the data.
1016
[1819]1017        """
1018        varlist = vars()
[1859]1019        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1020        self._add_history("clip", varlist)
1021
[1862]1022    @asaplog_post_dec
[1584]1023    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1024        """\
[1192]1025        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1026        Flagged data in the scantable gets interpolated over the region.
[1192]1027        No taper is applied.
[1846]1028
[1192]1029        Parameters:
[1846]1030
[1579]1031            start:    the start frequency (really a period within the
1032                      bandwidth)  or period to remove
[1855]1033
[1579]1034            end:      the end frequency or period to remove
[1855]1035
[1584]1036            unit:     the frequency unit (default "MHz") or "" for
[1579]1037                      explicit lag channels
[1846]1038
1039        *Notes*:
1040
[1579]1041            It is recommended to flag edges of the band or strong
[1348]1042            signals beforehand.
[1846]1043
[1192]1044        """
1045        if insitu is None: insitu = rcParams['insitu']
1046        self._math._setinsitu(insitu)
1047        varlist = vars()
[1579]1048        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1049        if not (unit == "" or base.has_key(unit)):
[1192]1050            raise ValueError("%s is not a valid unit." % unit)
[1859]1051        if unit == "":
1052            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1053        else:
1054            s = scantable(self._math._lag_flag(self, start*base[unit],
1055                                               end*base[unit], "frequency"))
[1192]1056        s._add_history("lag_flag", varlist)
1057        if insitu:
1058            self._assign(s)
1059        else:
1060            return s
[1001]1061
[1862]1062    @asaplog_post_dec
[113]1063    def create_mask(self, *args, **kwargs):
[1846]1064        """\
[1118]1065        Compute and return a mask based on [min, max] windows.
[189]1066        The specified windows are to be INCLUDED, when the mask is
[113]1067        applied.
[1846]1068
[102]1069        Parameters:
[1846]1070
[1118]1071            [min, max], [min2, max2], ...
[1024]1072                Pairs of start/end points (inclusive)specifying the regions
[102]1073                to be masked
[1855]1074
[189]1075            invert:     optional argument. If specified as True,
1076                        return an inverted mask, i.e. the regions
1077                        specified are EXCLUDED
[1855]1078
[513]1079            row:        create the mask using the specified row for
1080                        unit conversions, default is row=0
1081                        only necessary if frequency varies over rows.
[1846]1082
1083        Examples::
1084
[113]1085            scan.set_unit('channel')
[1846]1086            # a)
[1118]1087            msk = scan.create_mask([400, 500], [800, 900])
[189]1088            # masks everything outside 400 and 500
[113]1089            # and 800 and 900 in the unit 'channel'
1090
[1846]1091            # b)
[1118]1092            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1093            # masks the regions between 400 and 500
[113]1094            # and 800 and 900 in the unit 'channel'
[1846]1095
1096            # c)
1097            #mask only channel 400
[1554]1098            msk =  scan.create_mask([400])
[1846]1099
[102]1100        """
[1554]1101        row = kwargs.get("row", 0)
[513]1102        data = self._getabcissa(row)
[113]1103        u = self._getcoordinfo()[0]
[1859]1104        if u == "":
1105            u = "channel"
1106        msg = "The current mask window unit is %s" % u
1107        i = self._check_ifs()
1108        if not i:
1109            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1110        asaplog.push(msg)
[102]1111        n = self.nchan()
[1295]1112        msk = _n_bools(n, False)
[710]1113        # test if args is a 'list' or a 'normal *args - UGLY!!!
1114
[1118]1115        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1116             and args or args[0]
[710]1117        for window in ws:
[1554]1118            if len(window) == 1:
1119                window = [window[0], window[0]]
1120            if len(window) == 0 or  len(window) > 2:
1121                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1122            if window[0] > window[1]:
1123                tmp = window[0]
1124                window[0] = window[1]
1125                window[1] = tmp
[102]1126            for i in range(n):
[1024]1127                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1128                    msk[i] = True
[113]1129        if kwargs.has_key('invert'):
1130            if kwargs.get('invert'):
[1295]1131                msk = mask_not(msk)
[102]1132        return msk
[710]1133
[1819]1134    def get_masklist(self, mask=None, row=0):
[1846]1135        """\
[1819]1136        Compute and return a list of mask windows, [min, max].
[1846]1137
[1819]1138        Parameters:
[1846]1139
[1819]1140            mask:       channel mask, created with create_mask.
[1855]1141
[1819]1142            row:        calcutate the masklist using the specified row
1143                        for unit conversions, default is row=0
1144                        only necessary if frequency varies over rows.
[1846]1145
[1819]1146        Returns:
[1846]1147
[1819]1148            [min, max], [min2, max2], ...
1149                Pairs of start/end points (inclusive)specifying
1150                the masked regions
[1846]1151
[1819]1152        """
1153        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1154            raise TypeError("The mask should be list or tuple.")
1155        if len(mask) < 2:
1156            raise TypeError("The mask elements should be > 1")
1157        if self.nchan() != len(mask):
1158            msg = "Number of channels in scantable != number of mask elements"
1159            raise TypeError(msg)
1160        data = self._getabcissa(row)
1161        u = self._getcoordinfo()[0]
[1859]1162        if u == "":
1163            u = "channel"
1164        msg = "The current mask window unit is %s" % u
1165        i = self._check_ifs()
1166        if not i:
1167            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1168        asaplog.push(msg)
[1819]1169        masklist=[]
1170        ist, ien = None, None
1171        ist, ien=self.get_mask_indices(mask)
1172        if ist is not None and ien is not None:
1173            for i in xrange(len(ist)):
1174                range=[data[ist[i]],data[ien[i]]]
1175                range.sort()
1176                masklist.append([range[0],range[1]])
1177        return masklist
1178
1179    def get_mask_indices(self, mask=None):
[1846]1180        """\
[1819]1181        Compute and Return lists of mask start indices and mask end indices.
[1855]1182
1183        Parameters:
1184
[1819]1185            mask:       channel mask, created with create_mask.
[1846]1186
[1819]1187        Returns:
[1846]1188
[1819]1189            List of mask start indices and that of mask end indices,
1190            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1191
[1819]1192        """
1193        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1194            raise TypeError("The mask should be list or tuple.")
1195        if len(mask) < 2:
1196            raise TypeError("The mask elements should be > 1")
1197        istart=[]
1198        iend=[]
1199        if mask[0]: istart.append(0)
1200        for i in range(len(mask)-1):
1201            if not mask[i] and mask[i+1]:
1202                istart.append(i+1)
1203            elif mask[i] and not mask[i+1]:
1204                iend.append(i)
1205        if mask[len(mask)-1]: iend.append(len(mask)-1)
1206        if len(istart) != len(iend):
1207            raise RuntimeError("Numbers of mask start != mask end.")
1208        for i in range(len(istart)):
1209            if istart[i] > iend[i]:
1210                raise RuntimeError("Mask start index > mask end index")
1211                break
1212        return istart,iend
1213
1214#    def get_restfreqs(self):
1215#        """
1216#        Get the restfrequency(s) stored in this scantable.
1217#        The return value(s) are always of unit 'Hz'
1218#        Parameters:
1219#            none
1220#        Returns:
1221#            a list of doubles
1222#        """
1223#        return list(self._getrestfreqs())
1224
1225    def get_restfreqs(self, ids=None):
[1846]1226        """\
[256]1227        Get the restfrequency(s) stored in this scantable.
1228        The return value(s) are always of unit 'Hz'
[1846]1229
[256]1230        Parameters:
[1846]1231
[1819]1232            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1233                 be retrieved
[1846]1234
[256]1235        Returns:
[1846]1236
[1819]1237            dictionary containing ids and a list of doubles for each id
[1846]1238
[256]1239        """
[1819]1240        if ids is None:
1241            rfreqs={}
1242            idlist = self.getmolnos()
1243            for i in idlist:
1244                rfreqs[i]=list(self._getrestfreqs(i))
1245            return rfreqs
1246        else:
1247            if type(ids)==list or type(ids)==tuple:
1248                rfreqs={}
1249                for i in ids:
1250                    rfreqs[i]=list(self._getrestfreqs(i))
1251                return rfreqs
1252            else:
1253                return list(self._getrestfreqs(ids))
1254            #return list(self._getrestfreqs(ids))
[102]1255
[931]1256    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1257        """\
[931]1258        Set or replace the restfrequency specified and
1259        If the 'freqs' argument holds a scalar,
1260        then that rest frequency will be applied to all the selected
1261        data.  If the 'freqs' argument holds
1262        a vector, then it MUST be of equal or smaller length than
1263        the number of IFs (and the available restfrequencies will be
1264        replaced by this vector).  In this case, *all* data have
1265        the restfrequency set per IF according
1266        to the corresponding value you give in the 'freqs' vector.
[1118]1267        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1268        IF 1 gets restfreq 2e9.
[1846]1269
[1395]1270        You can also specify the frequencies via a linecatalog.
[1153]1271
[931]1272        Parameters:
[1846]1273
[931]1274            freqs:   list of rest frequency values or string idenitfiers
[1855]1275
[931]1276            unit:    unit for rest frequency (default 'Hz')
[402]1277
[1846]1278
1279        Example::
1280
[1819]1281            # set the given restfrequency for the all currently selected IFs
[931]1282            scan.set_restfreqs(freqs=1.4e9)
[1845]1283            # set restfrequencies for the n IFs  (n > 1) in the order of the
1284            # list, i.e
1285            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1286            # len(list_of_restfreqs) == nIF
1287            # for nIF == 1 the following will set multiple restfrequency for
1288            # that IF
[1819]1289            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1290            # set multiple restfrequencies per IF. as a list of lists where
1291            # the outer list has nIF elements, the inner s arbitrary
1292            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1293
[1846]1294       *Note*:
[1845]1295
[931]1296            To do more sophisticate Restfrequency setting, e.g. on a
1297            source and IF basis, use scantable.set_selection() before using
[1846]1298            this function::
[931]1299
[1846]1300                # provided your scantable is called scan
1301                selection = selector()
1302                selection.set_name("ORION*")
1303                selection.set_ifs([1])
1304                scan.set_selection(selection)
1305                scan.set_restfreqs(freqs=86.6e9)
1306
[931]1307        """
1308        varlist = vars()
[1157]1309        from asap import linecatalog
1310        # simple  value
[1118]1311        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1312            self._setrestfreqs([freqs], [""], unit)
[1157]1313        # list of values
[1118]1314        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1315            # list values are scalars
[1118]1316            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1317                if len(freqs) == 1:
1318                    self._setrestfreqs(freqs, [""], unit)
1319                else:
1320                    # allow the 'old' mode of setting mulitple IFs
1321                    sel = selector()
1322                    savesel = self._getselection()
1323                    iflist = self.getifnos()
1324                    if len(freqs)>len(iflist):
1325                        raise ValueError("number of elements in list of list "
1326                                         "exeeds the current IF selections")
1327                    iflist = self.getifnos()
1328                    for i, fval in enumerate(freqs):
1329                        sel.set_ifs(iflist[i])
1330                        self._setselection(sel)
1331                        self._setrestfreqs([fval], [""], unit)
1332                    self._setselection(savesel)
1333
1334            # list values are dict, {'value'=, 'name'=)
[1157]1335            elif isinstance(freqs[-1], dict):
[1845]1336                values = []
1337                names = []
1338                for d in freqs:
1339                    values.append(d["value"])
1340                    names.append(d["name"])
1341                self._setrestfreqs(values, names, unit)
[1819]1342            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1343                sel = selector()
1344                savesel = self._getselection()
[1322]1345                iflist = self.getifnos()
[1819]1346                if len(freqs)>len(iflist):
[1845]1347                    raise ValueError("number of elements in list of list exeeds"
1348                                     " the current IF selections")
1349                for i, fval in enumerate(freqs):
[1322]1350                    sel.set_ifs(iflist[i])
[1259]1351                    self._setselection(sel)
[1845]1352                    self._setrestfreqs(fval, [""], unit)
[1157]1353                self._setselection(savesel)
1354        # freqs are to be taken from a linecatalog
[1153]1355        elif isinstance(freqs, linecatalog):
1356            sel = selector()
1357            savesel = self._getselection()
1358            for i in xrange(freqs.nrow()):
[1322]1359                sel.set_ifs(iflist[i])
[1153]1360                self._setselection(sel)
[1845]1361                self._setrestfreqs([freqs.get_frequency(i)],
1362                                   [freqs.get_name(i)], "MHz")
[1153]1363                # ensure that we are not iterating past nIF
1364                if i == self.nif()-1: break
1365            self._setselection(savesel)
[931]1366        else:
1367            return
1368        self._add_history("set_restfreqs", varlist)
1369
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # Delegates straight to the base-class implementation; note that
        # unlike most mutators in this class, no history entry is recorded.
        Scantable.shift_refpix(self, delta)
[931]1385
[1862]1386    @asaplog_post_dec
[1259]1387    def history(self, filename=None):
[1846]1388        """\
[1259]1389        Print the history. Optionally to a file.
[1846]1390
[1348]1391        Parameters:
[1846]1392
[1348]1393            filename:    The name  of the file to save the history to.
[1846]1394
[1259]1395        """
[484]1396        hist = list(self._gethistory())
[794]1397        out = "-"*80
[484]1398        for h in hist:
[489]1399            if h.startswith("---"):
[1857]1400                out = "\n".join([out, h])
[489]1401            else:
1402                items = h.split("##")
1403                date = items[0]
1404                func = items[1]
1405                items = items[2:]
[794]1406                out += "\n"+date+"\n"
1407                out += "Function: %s\n  Parameters:" % (func)
[489]1408                for i in items:
1409                    s = i.split("=")
[1118]1410                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1411                out = "\n".join([out, "-"*80])
[1259]1412        if filename is not None:
1413            if filename is "":
1414                filename = 'scantable_history.txt'
1415            import os
1416            filename = os.path.expandvars(os.path.expanduser(filename))
1417            if not os.path.isdir(filename):
1418                data = open(filename, 'w')
1419                data.write(out)
1420                data.close()
1421            else:
1422                msg = "Illegal file name '%s'." % (filename)
[1859]1423                raise IOError(msg)
1424        return page(out)
[513]1425    #
1426    # Maths business
1427    #
[1862]1428    @asaplog_post_dec
[931]1429    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1430        """\
[1070]1431        Return the (time) weighted average of a scan.
[1846]1432
1433        *Note*:
1434
[1070]1435            in channels only - align if necessary
[1846]1436
[513]1437        Parameters:
[1846]1438
[513]1439            mask:     an optional mask (only used for 'var' and 'tsys'
1440                      weighting)
[1855]1441
[558]1442            scanav:   True averages each scan separately
1443                      False (default) averages all scans together,
[1855]1444
[1099]1445            weight:   Weighting scheme.
1446                      'none'     (mean no weight)
1447                      'var'      (1/var(spec) weighted)
1448                      'tsys'     (1/Tsys**2 weighted)
1449                      'tint'     (integration time weighted)
1450                      'tintsys'  (Tint/Tsys**2)
1451                      'median'   ( median averaging)
[535]1452                      The default is 'tint'
[1855]1453
[931]1454            align:    align the spectra in velocity before averaging. It takes
1455                      the time of the first spectrum as reference time.
[1846]1456
1457        Example::
1458
[513]1459            # time average the scantable without using a mask
[710]1460            newscan = scan.average_time()
[1846]1461
[513]1462        """
1463        varlist = vars()
[1593]1464        weight = weight or 'TINT'
1465        mask = mask or ()
1466        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1467        scan = (self, )
[1859]1468
1469        if align:
1470            scan = (self.freq_align(insitu=False), )
1471        s = None
1472        if weight.upper() == 'MEDIAN':
1473            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1474                                                     scanav))
1475        else:
1476            s = scantable(self._math._average(scan, mask, weight.upper(),
1477                          scanav))
[1099]1478        s._add_history("average_time", varlist)
[513]1479        return s
[710]1480
[1862]1481    @asaplog_post_dec
[876]1482    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1483        """\
[513]1484        Return a scan where all spectra are converted to either
1485        Jansky or Kelvin depending upon the flux units of the scan table.
1486        By default the function tries to look the values up internally.
1487        If it can't find them (or if you want to over-ride), you must
1488        specify EITHER jyperk OR eta (and D which it will try to look up
1489        also if you don't set it). jyperk takes precedence if you set both.
[1846]1490
[513]1491        Parameters:
[1846]1492
[513]1493            jyperk:      the Jy / K conversion factor
[1855]1494
[513]1495            eta:         the aperture efficiency
[1855]1496
[513]1497            d:           the geomtric diameter (metres)
[1855]1498
[513]1499            insitu:      if False a new scantable is returned.
1500                         Otherwise, the scaling is done in-situ
1501                         The default is taken from .asaprc (False)
[1846]1502
[513]1503        """
1504        if insitu is None: insitu = rcParams['insitu']
[876]1505        self._math._setinsitu(insitu)
[513]1506        varlist = vars()
[1593]1507        jyperk = jyperk or -1.0
1508        d = d or -1.0
1509        eta = eta or -1.0
[876]1510        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1511        s._add_history("convert_flux", varlist)
1512        if insitu: self._assign(s)
1513        else: return s
[513]1514
[1862]1515    @asaplog_post_dec
[876]1516    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1517        """\
[513]1518        Return a scan after applying a gain-elevation correction.
1519        The correction can be made via either a polynomial or a
1520        table-based interpolation (and extrapolation if necessary).
1521        You specify polynomial coefficients, an ascii table or neither.
1522        If you specify neither, then a polynomial correction will be made
1523        with built in coefficients known for certain telescopes (an error
1524        will occur if the instrument is not known).
1525        The data and Tsys are *divided* by the scaling factors.
[1846]1526
[513]1527        Parameters:
[1846]1528
[513]1529            poly:        Polynomial coefficients (default None) to compute a
1530                         gain-elevation correction as a function of
1531                         elevation (in degrees).
[1855]1532
[513]1533            filename:    The name of an ascii file holding correction factors.
1534                         The first row of the ascii file must give the column
1535                         names and these MUST include columns
1536                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1537                         by this) somewhere.
1538                         The second row must give the data type of the
1539                         column. Use 'R' for Real and 'I' for Integer.
1540                         An example file would be
1541                         (actual factors are arbitrary) :
1542
1543                         TIME ELEVATION FACTOR
1544                         R R R
1545                         0.1 0 0.8
1546                         0.2 20 0.85
1547                         0.3 40 0.9
1548                         0.4 60 0.85
1549                         0.5 80 0.8
1550                         0.6 90 0.75
[1855]1551
[513]1552            method:      Interpolation method when correcting from a table.
1553                         Values are  "nearest", "linear" (default), "cubic"
1554                         and "spline"
[1855]1555
[513]1556            insitu:      if False a new scantable is returned.
1557                         Otherwise, the scaling is done in-situ
1558                         The default is taken from .asaprc (False)
[1846]1559
[513]1560        """
1561
1562        if insitu is None: insitu = rcParams['insitu']
[876]1563        self._math._setinsitu(insitu)
[513]1564        varlist = vars()
[1593]1565        poly = poly or ()
[513]1566        from os.path import expandvars
1567        filename = expandvars(filename)
[876]1568        s = scantable(self._math._gainel(self, poly, filename, method))
1569        s._add_history("gain_el", varlist)
[1593]1570        if insitu:
1571            self._assign(s)
1572        else:
1573            return s
[710]1574
[1862]1575    @asaplog_post_dec
[931]1576    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1577        """\
[513]1578        Return a scan where all rows have been aligned in frequency/velocity.
1579        The alignment frequency frame (e.g. LSRK) is that set by function
1580        set_freqframe.
[1846]1581
[513]1582        Parameters:
[1855]1583
[513]1584            reftime:     reference time to align at. By default, the time of
1585                         the first row of data is used.
[1855]1586
[513]1587            method:      Interpolation method for regridding the spectra.
1588                         Choose from "nearest", "linear", "cubic" (default)
1589                         and "spline"
[1855]1590
[513]1591            insitu:      if False a new scantable is returned.
1592                         Otherwise, the scaling is done in-situ
1593                         The default is taken from .asaprc (False)
[1846]1594
[513]1595        """
[931]1596        if insitu is None: insitu = rcParams["insitu"]
[876]1597        self._math._setinsitu(insitu)
[513]1598        varlist = vars()
[1593]1599        reftime = reftime or ""
[931]1600        s = scantable(self._math._freq_align(self, reftime, method))
[876]1601        s._add_history("freq_align", varlist)
1602        if insitu: self._assign(s)
1603        else: return s
[513]1604
[1862]1605    @asaplog_post_dec
[1725]1606    def opacity(self, tau=None, insitu=None):
[1846]1607        """\
[513]1608        Apply an opacity correction. The data
1609        and Tsys are multiplied by the correction factor.
[1846]1610
[513]1611        Parameters:
[1855]1612
[1689]1613            tau:         (list of) opacity from which the correction factor is
[513]1614                         exp(tau*ZD)
[1689]1615                         where ZD is the zenith-distance.
1616                         If a list is provided, it has to be of length nIF,
1617                         nIF*nPol or 1 and in order of IF/POL, e.g.
1618                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1619                         if tau is `None` the opacities are determined from a
1620                         model.
[1855]1621
[513]1622            insitu:      if False a new scantable is returned.
1623                         Otherwise, the scaling is done in-situ
1624                         The default is taken from .asaprc (False)
[1846]1625
[513]1626        """
1627        if insitu is None: insitu = rcParams['insitu']
[876]1628        self._math._setinsitu(insitu)
[513]1629        varlist = vars()
[1689]1630        if not hasattr(tau, "__len__"):
1631            tau = [tau]
[876]1632        s = scantable(self._math._opacity(self, tau))
1633        s._add_history("opacity", varlist)
1634        if insitu: self._assign(s)
1635        else: return s
[513]1636
[1862]1637    @asaplog_post_dec
[513]1638    def bin(self, width=5, insitu=None):
[1846]1639        """\
[513]1640        Return a scan where all spectra have been binned up.
[1846]1641
[1348]1642        Parameters:
[1846]1643
[513]1644            width:       The bin width (default=5) in pixels
[1855]1645
[513]1646            insitu:      if False a new scantable is returned.
1647                         Otherwise, the scaling is done in-situ
1648                         The default is taken from .asaprc (False)
[1846]1649
[513]1650        """
1651        if insitu is None: insitu = rcParams['insitu']
[876]1652        self._math._setinsitu(insitu)
[513]1653        varlist = vars()
[876]1654        s = scantable(self._math._bin(self, width))
[1118]1655        s._add_history("bin", varlist)
[1589]1656        if insitu:
1657            self._assign(s)
1658        else:
1659            return s
[513]1660
[1862]1661    @asaplog_post_dec
[513]1662    def resample(self, width=5, method='cubic', insitu=None):
[1846]1663        """\
[1348]1664        Return a scan where all spectra have been binned up.
[1573]1665
[1348]1666        Parameters:
[1846]1667
[513]1668            width:       The bin width (default=5) in pixels
[1855]1669
[513]1670            method:      Interpolation method when correcting from a table.
1671                         Values are  "nearest", "linear", "cubic" (default)
1672                         and "spline"
[1855]1673
[513]1674            insitu:      if False a new scantable is returned.
1675                         Otherwise, the scaling is done in-situ
1676                         The default is taken from .asaprc (False)
[1846]1677
[513]1678        """
1679        if insitu is None: insitu = rcParams['insitu']
[876]1680        self._math._setinsitu(insitu)
[513]1681        varlist = vars()
[876]1682        s = scantable(self._math._resample(self, method, width))
[1118]1683        s._add_history("resample", varlist)
[876]1684        if insitu: self._assign(s)
1685        else: return s
[513]1686
[1862]1687    @asaplog_post_dec
[946]1688    def average_pol(self, mask=None, weight='none'):
[1846]1689        """\
[946]1690        Average the Polarisations together.
[1846]1691
[946]1692        Parameters:
[1846]1693
[946]1694            mask:        An optional mask defining the region, where the
1695                         averaging will be applied. The output will have all
1696                         specified points masked.
[1855]1697
[946]1698            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1699                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1700
[946]1701        """
1702        varlist = vars()
[1593]1703        mask = mask or ()
[1010]1704        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1705        s._add_history("average_pol", varlist)
[992]1706        return s
[513]1707
[1862]1708    @asaplog_post_dec
[1145]1709    def average_beam(self, mask=None, weight='none'):
[1846]1710        """\
[1145]1711        Average the Beams together.
[1846]1712
[1145]1713        Parameters:
1714            mask:        An optional mask defining the region, where the
1715                         averaging will be applied. The output will have all
1716                         specified points masked.
[1855]1717
[1145]1718            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1719                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1720
[1145]1721        """
1722        varlist = vars()
[1593]1723        mask = mask or ()
[1145]1724        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1725        s._add_history("average_beam", varlist)
1726        return s
1727
[1586]1728    def parallactify(self, pflag):
[1846]1729        """\
[1843]1730        Set a flag to indicate whether this data should be treated as having
[1617]1731        been 'parallactified' (total phase == 0.0)
[1846]1732
[1617]1733        Parameters:
[1855]1734
[1843]1735            pflag:  Bool indicating whether to turn this on (True) or
[1617]1736                    off (False)
[1846]1737
[1617]1738        """
[1586]1739        varlist = vars()
1740        self._parallactify(pflag)
1741        self._add_history("parallactify", varlist)
1742
[1862]1743    @asaplog_post_dec
[992]1744    def convert_pol(self, poltype=None):
[1846]1745        """\
[992]1746        Convert the data to a different polarisation type.
[1565]1747        Note that you will need cross-polarisation terms for most conversions.
[1846]1748
[992]1749        Parameters:
[1855]1750
[992]1751            poltype:    The new polarisation type. Valid types are:
[1565]1752                        "linear", "circular", "stokes" and "linpol"
[1846]1753
[992]1754        """
1755        varlist = vars()
[1859]1756        s = scantable(self._math._convertpol(self, poltype))
[1118]1757        s._add_history("convert_pol", varlist)
[992]1758        return s
1759
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all
                        other kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so rejected rows can be restored
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive mode: overlay original and smoothed spectra for
            # every row and ask the user to accept or reject the smoothing
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original (unsmoothed) spectrum back
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1837
    @asaplog_post_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
                      insitu=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:       an optional mask

            order:      the order of the polynomial (default is 0)

            plot:       plot the fit and the residual. In this each
                        individual fit has to be approved, by typing 'y'
                        or 'n'

            uselin:     use linear polynomial fit

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        Example::

            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        # default mask: select every channel
        if mask is None:
            mask = [True for i in xrange(self.nchan(-1))]

        from asap.asapfitter import fitter
        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            rows = range(workscan.nrow())
            # per-row fit parameters are collected on self.blpars;
            # NOTE: this attribute does not track later scantable changes
            if len(rows) > 0:
                self.blpars = []

            for r in rows:
                # take into account flagtra info (CAS-1434)
                flagtra = workscan._getmask(r)
                actualmask = mask[:]
                if len(actualmask) == 0:
                    actualmask = list(flagtra[:])
                else:
                    if len(actualmask) != len(flagtra):
                        raise RuntimeError, "Mask and flagtra have different length"
                    else:
                        # AND the user mask with the channel flags
                        for i in range(0, len(actualmask)):
                            actualmask[i] = actualmask[i] and flagtra[i]
                f.set_scan(workscan, actualmask)
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.data = None
                f.fit()
                if plot:
                    # interactive approval; rejected rows keep their data
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        self.blpars.append(None)
                        continue
                # replace the spectrum with the fit residual (baseline removed)
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())

            if plot:
                f._p.unmap()
                f._p = None
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
[513]1925
[1819]1926
[1118]1927    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1928                           threshold=3, chan_avg_limit=1, plot=False,
1929                           insitu=None):
[1846]1930        """\
[880]1931        Return a scan which has been baselined (all rows) by a polynomial.
1932        Spectral lines are detected first using linefinder and masked out
1933        to avoid them affecting the baseline solution.
1934
1935        Parameters:
[1846]1936
[880]1937            mask:       an optional mask retreived from scantable
[1846]1938
1939            edge:       an optional number of channel to drop at the edge of
1940                        spectrum. If only one value is
[880]1941                        specified, the same number will be dropped from
1942                        both sides of the spectrum. Default is to keep
[907]1943                        all channels. Nested tuples represent individual
[976]1944                        edge selection for different IFs (a number of spectral
1945                        channels can be different)
[1846]1946
[880]1947            order:      the order of the polynomial (default is 0)
[1846]1948
[880]1949            threshold:  the threshold used by line finder. It is better to
1950                        keep it large as only strong lines affect the
1951                        baseline solution.
[1846]1952
[1280]1953            chan_avg_limit:
1954                        a maximum number of consequtive spectral channels to
1955                        average during the search of weak and broad lines.
1956                        The default is no averaging (and no search for weak
1957                        lines). If such lines can affect the fitted baseline
1958                        (e.g. a high order polynomial is fitted), increase this
1959                        parameter (usually values up to 8 are reasonable). Most
1960                        users of this method should find the default value
1961                        sufficient.
[1846]1962
[1061]1963            plot:       plot the fit and the residual. In this each
1964                        indivual fit has to be approved, by typing 'y'
1965                        or 'n'
[1846]1966
[880]1967            insitu:     if False a new scantable is returned.
1968                        Otherwise, the scaling is done in-situ
1969                        The default is taken from .asaprc (False)
1970
[1846]1971
1972        Example::
1973
1974            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
1975
[880]1976        """
1977        if insitu is None: insitu = rcParams['insitu']
1978        varlist = vars()
1979        from asap.asapfitter import fitter
1980        from asap.asaplinefind import linefinder
1981        from asap import _is_sequence_or_number as _is_valid
1982
[976]1983        # check whether edge is set up for each IF individually
[1118]1984        individualedge = False;
1985        if len(edge) > 1:
1986            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1987                individualedge = True;
[907]1988
[1118]1989        if not _is_valid(edge, int) and not individualedge:
[909]1990            raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1991            pair of integers specified as a tuple. Nested tuples are allowed \
1992            to make individual selection for different IFs."
[919]1993
[1118]1994        curedge = (0, 0)
1995        if individualedge:
1996            for edgepar in edge:
1997                if not _is_valid(edgepar, int):
1998                    raise ValueError, "Each element of the 'edge' tuple has \
1999                                       to be a pair of integers or an integer."
[907]2000        else:
[1118]2001            curedge = edge;
[880]2002
2003        # setup fitter
2004        f = fitter()
2005        f.set_function(poly=order)
2006
2007        # setup line finder
[1118]2008        fl = linefinder()
[1268]2009        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2010
2011        if not insitu:
[1118]2012            workscan = self.copy()
[880]2013        else:
[1118]2014            workscan = self
[880]2015
[907]2016        fl.set_scan(workscan)
2017
[1118]2018        rows = range(workscan.nrow())
[1819]2019        # Save parameters of baseline fits & masklists as a class attribute.
2020        # NOTICE: It does not reflect changes in scantable!
2021        if len(rows) > 0:
2022            self.blpars=[]
2023            self.masklists=[]
[880]2024        asaplog.push("Processing:")
2025        for r in rows:
[1118]2026            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2027                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2028                 workscan.getpol(r), workscan.getcycle(r))
[880]2029            asaplog.push(msg, False)
[907]2030
[976]2031            # figure out edge parameter
[1118]2032            if individualedge:
2033                if len(edge) >= workscan.getif(r):
2034                    raise RuntimeError, "Number of edge elements appear to " \
2035                                        "be less than the number of IFs"
2036                    curedge = edge[workscan.getif(r)]
[919]2037
[1819]2038            # take into account flagtra info (CAS-1434)
2039            flagtra = workscan._getmask(r)
2040            actualmask = mask[:]
2041            if len(actualmask) == 0:
2042                actualmask = list(flagtra[:])
2043            else:
2044                if len(actualmask) != len(flagtra):
2045                    raise RuntimeError, "Mask and flagtra have different length"
2046                else:
2047                    for i in range(0, len(actualmask)):
2048                        actualmask[i] = actualmask[i] and flagtra[i]
2049
[976]2050            # setup line finder
[1819]2051            fl.find_lines(r, actualmask, curedge)
2052            outmask=fl.get_mask()
2053            f.set_scan(workscan, fl.get_mask())
2054            f.x = workscan._getabcissa(r)
2055            f.y = workscan._getspectrum(r)
2056            f.data = None
[880]2057            f.fit()
[1819]2058
2059            # Show mask list
2060            masklist=workscan.get_masklist(fl.get_mask(),row=r)
2061            msg = "mask range: "+str(masklist)
2062            asaplog.push(msg, False)
2063
[1061]2064            if plot:
2065                f.plot(residual=True)
2066                x = raw_input("Accept fit ( [y]/n ): ")
2067                if x.upper() == 'N':
[1819]2068                    self.blpars.append(None)
2069                    self.masklists.append(None)
[1061]2070                    continue
[1819]2071
[880]2072            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2073            self.blpars.append(f.get_parameters())
2074            self.masklists.append(masklist)
[1061]2075        if plot:
2076            f._p.unmap()
2077            f._p = None
2078        workscan._add_history("auto_poly_baseline", varlist)
[880]2079        if insitu:
2080            self._assign(workscan)
2081        else:
2082            return workscan
2083
[1862]2084    @asaplog_post_dec
[914]2085    def rotate_linpolphase(self, angle):
[1846]2086        """\
[914]2087        Rotate the phase of the complex polarization O=Q+iU correlation.
2088        This is always done in situ in the raw data.  So if you call this
2089        function more than once then each call rotates the phase further.
[1846]2090
[914]2091        Parameters:
[1846]2092
[914]2093            angle:   The angle (degrees) to rotate (add) by.
[1846]2094
2095        Example::
2096
[914]2097            scan.rotate_linpolphase(2.3)
[1846]2098
[914]2099        """
2100        varlist = vars()
[936]2101        self._math._rotate_linpolphase(self, angle)
[914]2102        self._add_history("rotate_linpolphase", varlist)
2103        return
[710]2104
[1862]2105    @asaplog_post_dec
[914]2106    def rotate_xyphase(self, angle):
[1846]2107        """\
[914]2108        Rotate the phase of the XY correlation.  This is always done in situ
2109        in the data.  So if you call this function more than once
2110        then each call rotates the phase further.
[1846]2111
[914]2112        Parameters:
[1846]2113
[914]2114            angle:   The angle (degrees) to rotate (add) by.
[1846]2115
2116        Example::
2117
[914]2118            scan.rotate_xyphase(2.3)
[1846]2119
[914]2120        """
2121        varlist = vars()
[936]2122        self._math._rotate_xyphase(self, angle)
[914]2123        self._add_history("rotate_xyphase", varlist)
2124        return
2125
[1862]2126    @asaplog_post_dec
[914]2127    def swap_linears(self):
[1846]2128        """\
[1573]2129        Swap the linear polarisations XX and YY, or better the first two
[1348]2130        polarisations as this also works for ciculars.
[914]2131        """
2132        varlist = vars()
[936]2133        self._math._swap_linears(self)
[914]2134        self._add_history("swap_linears", varlist)
2135        return
2136
[1862]2137    @asaplog_post_dec
[914]2138    def invert_phase(self):
[1846]2139        """\
[914]2140        Invert the phase of the complex polarisation
2141        """
2142        varlist = vars()
[936]2143        self._math._invert_phase(self)
[914]2144        self._add_history("invert_phase", varlist)
2145        return
2146
[1862]2147    @asaplog_post_dec
[876]2148    def add(self, offset, insitu=None):
[1846]2149        """\
[513]2150        Return a scan where all spectra have the offset added
[1846]2151
[513]2152        Parameters:
[1846]2153
[513]2154            offset:      the offset
[1855]2155
[513]2156            insitu:      if False a new scantable is returned.
2157                         Otherwise, the scaling is done in-situ
2158                         The default is taken from .asaprc (False)
[1846]2159
[513]2160        """
2161        if insitu is None: insitu = rcParams['insitu']
[876]2162        self._math._setinsitu(insitu)
[513]2163        varlist = vars()
[876]2164        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2165        s._add_history("add", varlist)
[876]2166        if insitu:
2167            self._assign(s)
2168        else:
[513]2169            return s
2170
[1862]2171    @asaplog_post_dec
[1308]2172    def scale(self, factor, tsys=True, insitu=None):
[1846]2173        """\
2174
[513]2175        Return a scan where all spectra are scaled by the give 'factor'
[1846]2176
[513]2177        Parameters:
[1846]2178
[1819]2179            factor:      the scaling factor (float or 1D float list)
[1855]2180
[513]2181            insitu:      if False a new scantable is returned.
2182                         Otherwise, the scaling is done in-situ
2183                         The default is taken from .asaprc (False)
[1855]2184
[513]2185            tsys:        if True (default) then apply the operation to Tsys
2186                         as well as the data
[1846]2187
[513]2188        """
2189        if insitu is None: insitu = rcParams['insitu']
[876]2190        self._math._setinsitu(insitu)
[513]2191        varlist = vars()
[1819]2192        s = None
2193        import numpy
2194        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2195            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2196                from asapmath import _array2dOp
2197                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2198            else:
2199                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2200        else:
2201            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2202        s._add_history("scale", varlist)
[876]2203        if insitu:
2204            self._assign(s)
2205        else:
[513]2206            return s
2207
[1504]2208    def set_sourcetype(self, match, matchtype="pattern",
2209                       sourcetype="reference"):
[1846]2210        """\
[1502]2211        Set the type of the source to be an source or reference scan
[1846]2212        using the provided pattern.
2213
[1502]2214        Parameters:
[1846]2215
[1504]2216            match:          a Unix style pattern, regular expression or selector
[1855]2217
[1504]2218            matchtype:      'pattern' (default) UNIX style pattern or
2219                            'regex' regular expression
[1855]2220
[1502]2221            sourcetype:     the type of the source to use (source/reference)
[1846]2222
[1502]2223        """
2224        varlist = vars()
2225        basesel = self.get_selection()
2226        stype = -1
2227        if sourcetype.lower().startswith("r"):
2228            stype = 1
2229        elif sourcetype.lower().startswith("s"):
2230            stype = 0
[1504]2231        else:
[1502]2232            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2233        if matchtype.lower().startswith("p"):
2234            matchtype = "pattern"
2235        elif matchtype.lower().startswith("r"):
2236            matchtype = "regex"
2237        else:
2238            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2239        sel = selector()
2240        if isinstance(match, selector):
2241            sel = match
2242        else:
[1504]2243            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2244        self.set_selection(basesel+sel)
2245        self._setsourcetype(stype)
2246        self.set_selection(basesel)
[1573]2247        self._add_history("set_sourcetype", varlist)
[1502]2248
[1862]2249    @asaplog_post_dec
[1857]2250    @preserve_selection
[1819]2251    def auto_quotient(self, preserve=True, mode='paired', verify=False):
[1846]2252        """\
[670]2253        This function allows to build quotients automatically.
[1819]2254        It assumes the observation to have the same number of
[670]2255        "ons" and "offs"
[1846]2256
[670]2257        Parameters:
[1846]2258
[710]2259            preserve:       you can preserve (default) the continuum or
2260                            remove it.  The equations used are
[1857]2261
[670]2262                            preserve: Output = Toff * (on/off) - Toff
[1857]2263
[1070]2264                            remove:   Output = Toff * (on/off) - Ton
[1855]2265
[1573]2266            mode:           the on/off detection mode
[1348]2267                            'paired' (default)
2268                            identifies 'off' scans by the
2269                            trailing '_R' (Mopra/Parkes) or
2270                            '_e'/'_w' (Tid) and matches
2271                            on/off pairs from the observing pattern
[1502]2272                            'time'
2273                            finds the closest off in time
[1348]2274
[1857]2275        .. todo:: verify argument is not implemented
2276
[670]2277        """
[1857]2278        varlist = vars()
[1348]2279        modes = ["time", "paired"]
[670]2280        if not mode in modes:
[876]2281            msg = "please provide valid mode. Valid modes are %s" % (modes)
2282            raise ValueError(msg)
[1348]2283        s = None
2284        if mode.lower() == "paired":
[1857]2285            sel = self.get_selection()
[1875]2286            sel.set_query("SRCTYPE==psoff")
[1356]2287            self.set_selection(sel)
[1348]2288            offs = self.copy()
[1875]2289            sel.set_query("SRCTYPE==pson")
[1356]2290            self.set_selection(sel)
[1348]2291            ons = self.copy()
2292            s = scantable(self._math._quotient(ons, offs, preserve))
2293        elif mode.lower() == "time":
2294            s = scantable(self._math._auto_quotient(self, mode, preserve))
[1118]2295        s._add_history("auto_quotient", varlist)
[876]2296        return s
[710]2297
    @asaplog_post_dec
    def mx_quotient(self, mask = None, weight='median', preserve=True):
        """\
        Form a quotient using "off" beams when observing in "MX" mode.

        Parameters:

            mask:           an optional mask to be used when weight == 'stddev'

            weight:         How to average the off beams.  Default is 'median'.

            preserve:       you can preserve (default) the continuum or
                            remove it.  The equations used are:

                                preserve: Output = Toff * (on/off) - Toff

                                remove:   Output = Toff * (on/off) - Ton

        Returns:

            a new scantable holding the quotient spectra.

        """
        # an unset mask becomes an empty tuple, i.e. no channels masked
        mask = mask or ()
        # snapshot the locals *now* so the history records the call
        # parameters, not any intermediate variables created below
        varlist = vars()
        # split the data into the "on" and "off" beam scans
        on = scantable(self._math._mx_extract(self, 'on'))
        preoff = scantable(self._math._mx_extract(self, 'off'))
        # collapse all off-beam integrations into a single reference
        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
        from asapmath  import quotient
        q = quotient(on, off, preserve)
        q._add_history("mx_quotient", varlist)
        return q
[513]2326
[1862]2327    @asaplog_post_dec
[718]2328    def freq_switch(self, insitu=None):
[1846]2329        """\
[718]2330        Apply frequency switching to the data.
[1846]2331
[718]2332        Parameters:
[1846]2333
[718]2334            insitu:      if False a new scantable is returned.
2335                         Otherwise, the swictching is done in-situ
2336                         The default is taken from .asaprc (False)
[1846]2337
[718]2338        """
2339        if insitu is None: insitu = rcParams['insitu']
[876]2340        self._math._setinsitu(insitu)
[718]2341        varlist = vars()
[876]2342        s = scantable(self._math._freqswitch(self))
[1118]2343        s._add_history("freq_switch", varlist)
[1856]2344        if insitu:
2345            self._assign(s)
2346        else:
2347            return s
[718]2348
[1862]2349    @asaplog_post_dec
[780]2350    def recalc_azel(self):
[1846]2351        """Recalculate the azimuth and elevation for each position."""
[780]2352        varlist = vars()
[876]2353        self._recalcazel()
[780]2354        self._add_history("recalc_azel", varlist)
2355        return
2356
[1862]2357    @asaplog_post_dec
[513]2358    def __add__(self, other):
2359        varlist = vars()
2360        s = None
2361        if isinstance(other, scantable):
[1573]2362            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2363        elif isinstance(other, float):
[876]2364            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2365        else:
[718]2366            raise TypeError("Other input is not a scantable or float value")
[513]2367        s._add_history("operator +", varlist)
2368        return s
2369
[1862]2370    @asaplog_post_dec
[513]2371    def __sub__(self, other):
2372        """
2373        implicit on all axes and on Tsys
2374        """
2375        varlist = vars()
2376        s = None
2377        if isinstance(other, scantable):
[1588]2378            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2379        elif isinstance(other, float):
[876]2380            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2381        else:
[718]2382            raise TypeError("Other input is not a scantable or float value")
[513]2383        s._add_history("operator -", varlist)
2384        return s
[710]2385
[1862]2386    @asaplog_post_dec
[513]2387    def __mul__(self, other):
2388        """
2389        implicit on all axes and on Tsys
2390        """
2391        varlist = vars()
2392        s = None
2393        if isinstance(other, scantable):
[1588]2394            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2395        elif isinstance(other, float):
[876]2396            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2397        else:
[718]2398            raise TypeError("Other input is not a scantable or float value")
[513]2399        s._add_history("operator *", varlist)
2400        return s
2401
[710]2402
[1862]2403    @asaplog_post_dec
[513]2404    def __div__(self, other):
2405        """
2406        implicit on all axes and on Tsys
2407        """
2408        varlist = vars()
2409        s = None
2410        if isinstance(other, scantable):
[1589]2411            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2412        elif isinstance(other, float):
2413            if other == 0.0:
[718]2414                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2415            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2416        else:
[718]2417            raise TypeError("Other input is not a scantable or float value")
[513]2418        s._add_history("operator /", varlist)
2419        return s
2420
[1862]2421    @asaplog_post_dec
[530]2422    def get_fit(self, row=0):
[1846]2423        """\
[530]2424        Print or return the stored fits for a row in the scantable
[1846]2425
[530]2426        Parameters:
[1846]2427
[530]2428            row:    the row which the fit has been applied to.
[1846]2429
[530]2430        """
2431        if row > self.nrow():
2432            return
[976]2433        from asap.asapfit import asapfit
[530]2434        fit = asapfit(self._getfit(row))
[1859]2435        asaplog.push( '%s' %(fit) )
2436        return fit.as_dict()
[530]2437
[1483]2438    def flag_nans(self):
[1846]2439        """\
[1483]2440        Utility function to flag NaN values in the scantable.
2441        """
2442        import numpy
2443        basesel = self.get_selection()
2444        for i in range(self.nrow()):
[1589]2445            sel = self.get_row_selector(i)
2446            self.set_selection(basesel+sel)
[1483]2447            nans = numpy.isnan(self._getspectrum(0))
2448        if numpy.any(nans):
2449            bnans = [ bool(v) for v in nans]
2450            self.flag(bnans)
2451        self.set_selection(basesel)
2452
[1588]2453    def get_row_selector(self, rowno):
2454        return selector(beams=self.getbeam(rowno),
2455                        ifs=self.getif(rowno),
2456                        pols=self.getpol(rowno),
2457                        scans=self.getscan(rowno),
2458                        cycles=self.getcycle(rowno))
[1573]2459
[484]2460    def _add_history(self, funcname, parameters):
[1435]2461        if not rcParams['scantable.history']:
2462            return
[484]2463        # create date
2464        sep = "##"
2465        from datetime import datetime
2466        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2467        hist = dstr+sep
2468        hist += funcname+sep#cdate+sep
2469        if parameters.has_key('self'): del parameters['self']
[1118]2470        for k, v in parameters.iteritems():
[484]2471            if type(v) is dict:
[1118]2472                for k2, v2 in v.iteritems():
[484]2473                    hist += k2
2474                    hist += "="
[1118]2475                    if isinstance(v2, scantable):
[484]2476                        hist += 'scantable'
2477                    elif k2 == 'mask':
[1118]2478                        if isinstance(v2, list) or isinstance(v2, tuple):
[513]2479                            hist += str(self._zip_mask(v2))
2480                        else:
2481                            hist += str(v2)
[484]2482                    else:
[513]2483                        hist += str(v2)
[484]2484            else:
2485                hist += k
2486                hist += "="
[1118]2487                if isinstance(v, scantable):
[484]2488                    hist += 'scantable'
2489                elif k == 'mask':
[1118]2490                    if isinstance(v, list) or isinstance(v, tuple):
[513]2491                        hist += str(self._zip_mask(v))
2492                    else:
2493                        hist += str(v)
[484]2494                else:
2495                    hist += str(v)
2496            hist += sep
2497        hist = hist[:-2] # remove trailing '##'
2498        self._addhistory(hist)
2499
[710]2500
[484]2501    def _zip_mask(self, mask):
2502        mask = list(mask)
2503        i = 0
2504        segments = []
2505        while mask[i:].count(1):
2506            i += mask[i:].index(1)
2507            if mask[i:].count(0):
2508                j = i + mask[i:].index(0)
2509            else:
[710]2510                j = len(mask)
[1118]2511            segments.append([i, j])
[710]2512            i = j
[484]2513        return segments
[714]2514
[626]2515    def _get_ordinate_label(self):
2516        fu = "("+self.get_fluxunit()+")"
2517        import re
2518        lbl = "Intensity"
[1118]2519        if re.match(".K.", fu):
[626]2520            lbl = "Brightness Temperature "+ fu
[1118]2521        elif re.match(".Jy.", fu):
[626]2522            lbl = "Flux density "+ fu
2523        return lbl
[710]2524
[876]2525    def _check_ifs(self):
2526        nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]2527        nchans = filter(lambda t: t > 0, nchans)
[876]2528        return (sum(nchans)/len(nchans) == nchans[0])
[976]2529
    @asaplog_post_dec
    def _fill(self, names, unit, average, getpt, antenna):
        """Fill this scantable from the files listed in *names*.

        Parameters:

            names:      sequence of file/directory names to import

            unit:       if not None, the flux unit to set on the result

            average:    if True, average integrations per scan while
                        reading

            getpt:      currently not forwarded to the filler (see the
                        commented-out open() arguments below)

            antenna:    currently not forwarded to the filler either
        """
        first = True
        fullnames = []
        # expand '~' and environment variables, and verify every input
        # exists before starting the (potentially long) import
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # 1 for disk-based storage, 0 for a memory table
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            r.open(name)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge what was just read with everything read so far
                tbl = self._math._merge([self, tbl])
            # rebind this instance to the freshly filled/merged table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
            #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2567
[1402]2568    def __getitem__(self, key):
2569        if key < 0:
2570            key += self.nrow()
2571        if key >= self.nrow():
2572            raise IndexError("Row index out of range.")
2573        return self._getspectrum(key)
2574
2575    def __setitem__(self, key, value):
2576        if key < 0:
2577            key += self.nrow()
2578        if key >= self.nrow():
2579            raise IndexError("Row index out of range.")
2580        if not hasattr(value, "__len__") or \
2581                len(value) > self.nchan(self.getif(key)):
2582            raise ValueError("Spectrum length doesn't match.")
2583        return self._setspectrum(value, key)
2584
    def __len__(self):
        """Return the number of rows (spectra) in the scantable."""
        return self.nrow()
2587
2588    def __iter__(self):
2589        for i in range(len(self)):
2590            yield self[i]
Note: See TracBrowser for help on using the repository browser.