source: trunk/python/scantable.py @ 1992

Last change on this file since 1992 was 1992, checked in by Kana Sugimoto, 13 years ago

New Development: No

JIRA Issue: No (minor improvements)

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs: run scantable.get_row(rowno) and scantable.get_row_selector(rowno)

to select single row, rowno, in scantable.

Put in Release Notes: No

Module(s): scantable class

Description:

More straightforward row selection by using a parameter rows in scantable.get_row
and scantable.get_row_selector


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.3 KB
RevLine 
[1846]1"""This module defines the scantable class."""
2
[1697]3import os
[1948]4import numpy
[1691]5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
[1824]10from asap.env import is_casapy
[876]11from asap._asap import Scantable
[1843]12from asap._asap import filler
[1824]13from asap.parameters import rcParams
[1862]14from asap.logging import asaplog, asaplog_post_dec
[1824]15from asap.selector import selector
16from asap.linecatalog import linecatalog
[1600]17from asap.coordinate import coordinate
[1859]18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
[1907]19from asap.asapfitter import fitter
[102]20
[1689]21
def preserve_selection(func):
    """Decorator: run *func* on a scantable-like object and restore the
    object's selection afterwards, even if *func* raises."""
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            # always put the original selection back
            obj.set_selection(saved)
        return result
    return wrapper
32
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a casacore table directory whose
    table.info does NOT identify it as a Measurement Set.
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # Inspect the first line of table.info; anything that is a casacore
    # table but not a 'Measurement Set' is treated as a scantable.
    # Use a context manager so the handle is closed even if readline fails.
    with open(filename+'/table.info') as f:
        l = f.readline()
    return l.find('Measurement Set') == -1
[1697]57
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns True when *filename* is a casacore table directory whose
    table.info identifies it as a 'Measurement Set'.
    """
    if not ( os.path.isdir(filename)
             and os.path.exists(filename+'/table.info')
             and os.path.exists(filename+'/table.dat') ):
        return False
    # Use a context manager so the handle is closed even if readline fails.
    with open(filename+'/table.info') as f:
        l = f.readline()
    return l.find('Measurement Set') != -1
78   
[876]79class scantable(Scantable):
[1846]80    """\
81        The ASAP container for scans (single-dish data).
[102]82    """
[1819]83
    @asaplog_post_dec
    #def __init__(self, filename, average=None, unit=None, getpt=None,
    #             antenna=None, parallactify=None):
    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True, all pointing data are filled.
                          The default is False, which makes time to load
                          the MS data faster in some cases.

            antenna:      for MeasurementSet input data only:
                          Antenna selection. integer (id) or string (name or id).

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # Historical handling of the 'getpt'/'antenna' keywords, kept for
        # reference; both are now forwarded via **args to the MS filler below.
        #if getpt is None:
        #    getpt = True
        #if antenna is not None:
        #    asaplog.push("Antenna selection currently unsupported."
        #                 "Using ''")
        #    asaplog.post('WARN')
        #if antenna is None:
        #    antenna = ''
        #elif type(antenna) == int:
        #    antenna = '%s' % antenna
        #elif type(antenna) == list:
        #    tmpstr = ''
        #    for i in range( len(antenna) ):
        #        if type(antenna[i]) == int:
        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
        #        elif type(antenna[i]) == str:
        #            tmpstr=tmpstr+antenna[i]+','
        #        else:
        #            raise TypeError('Bad antenna selection.')
        #    antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # snapshot of the constructor arguments, recorded in the history
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # [advanced] reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # native asap table: open directly, storage per .asaprc
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # Measurement Set: pass only MS-specific keywords through
                    opts={'ms': {}}
                    mskeys=['getpt','antenna']
                    for key in mskeys:
                        if key in args.keys():
                            opts['ms'][key] = args[key]
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average, opts)
                elif os.path.isfile(filename):
                    # any other single file (rpfits/sdfits): let the filler decide
                    #self._fill([filename], unit, average, getpt, antenna)
                    self._fill([filename], unit, average)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names to fill from
                #self._fill(filename, unit, average, getpt, antenna)
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]186
    @asaplog_post_dec
    def save(self, name=None, format=None, overwrite=False):
        """\
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.

        Parameters:

            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)

            format:      an optional file format. Default is ASAP.
                         Allowed are:

                            * 'ASAP' (save as ASAP [aips++] Table),
                            * 'SDFITS' (save as SDFITS file)
                            * 'ASCII' (saves as ascii text file)
                            * 'MS2' (saves as an casacore MeasurementSet V2)
                            * 'FITS' (save as image FITS - not readable by class)
                            * 'CLASS' (save as FITS readable by CLASS)

            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.

        Example::

            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')

        """
        from os import path
        # fall back to the .asaprc default when no format is given
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given: derive one from the output format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native format is written by the underlying Scantable itself
            self._save(name)
        else:
            # every foreign format goes through the stwriter
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
239
[102]240    def copy(self):
[1846]241        """Return a copy of this scantable.
242
243        *Note*:
244
[1348]245            This makes a full (deep) copy. scan2 = scan1 makes a reference.
[1846]246
247        Example::
248
[102]249            copiedscan = scan.copy()
[1846]250
[102]251        """
[876]252        sd = scantable(Scantable._copy(self))
[113]253        return sd
254
[1093]255    def drop_scan(self, scanid=None):
[1846]256        """\
[1093]257        Return a new scantable where the specified scan number(s) has(have)
258        been dropped.
[1846]259
[1093]260        Parameters:
[1846]261
[1093]262            scanid:    a (list of) scan number(s)
[1846]263
[1093]264        """
265        from asap import _is_sequence_or_number as _is_valid
266        from asap import _to_list
267        from asap import unique
268        if not _is_valid(scanid):
[1859]269            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
270        scanid = _to_list(scanid)
271        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
272        for sid in scanid: allscans.remove(sid)
273        if len(allscans) == 0:
274            raise ValueError("Can't remove all scans")
275        sel = selector(scans=allscans)
276        return self._select_copy(sel)
[1093]277
[1594]278    def _select_copy(self, selection):
279        orig = self.get_selection()
280        self.set_selection(orig+selection)
281        cp = self.copy()
282        self.set_selection(orig)
283        return cp
284
[102]285    def get_scan(self, scanid=None):
[1855]286        """\
[102]287        Return a specific scan (by scanno) or collection of scans (by
288        source name) in a new scantable.
[1846]289
290        *Note*:
291
[1348]292            See scantable.drop_scan() for the inverse operation.
[1846]293
[102]294        Parameters:
[1846]295
[513]296            scanid:    a (list of) scanno or a source name, unix-style
297                       patterns are accepted for source name matching, e.g.
298                       '*_R' gets all 'ref scans
[1846]299
300        Example::
301
[513]302            # get all scans containing the source '323p459'
303            newscan = scan.get_scan('323p459')
304            # get all 'off' scans
305            refscans = scan.get_scan('*_R')
306            # get a susbset of scans by scanno (as listed in scan.summary())
[1118]307            newscan = scan.get_scan([0, 2, 7, 10])
[1846]308
[102]309        """
310        if scanid is None:
[1859]311            raise RuntimeError( 'Please specify a scan no or name to '
312                                'retrieve from the scantable' )
[102]313        try:
[946]314            bsel = self.get_selection()
315            sel = selector()
[102]316            if type(scanid) is str:
[946]317                sel.set_name(scanid)
[1594]318                return self._select_copy(sel)
[102]319            elif type(scanid) is int:
[946]320                sel.set_scans([scanid])
[1594]321                return self._select_copy(sel)
[381]322            elif type(scanid) is list:
[946]323                sel.set_scans(scanid)
[1594]324                return self._select_copy(sel)
[381]325            else:
[718]326                msg = "Illegal scanid type, use 'int' or 'list' if ints."
[1859]327                raise TypeError(msg)
[102]328        except RuntimeError:
[1859]329            raise
[102]330
331    def __str__(self):
[1118]332        return Scantable._summary(self, True)
[102]333
[976]334    def summary(self, filename=None):
[1846]335        """\
[102]336        Print a summary of the contents of this scantable.
[1846]337
[102]338        Parameters:
[1846]339
[1931]340            filename:    the name of a file to write the putput to
[102]341                         Default - no file output
[1846]342
[102]343        """
[976]344        info = Scantable._summary(self, True)
[102]345        if filename is not None:
[256]346            if filename is "":
347                filename = 'scantable_summary.txt'
[415]348            from os.path import expandvars, isdir
[411]349            filename = expandvars(filename)
[415]350            if not isdir(filename):
[413]351                data = open(filename, 'w')
352                data.write(info)
353                data.close()
354            else:
[718]355                msg = "Illegal file name '%s'." % (filename)
[1859]356                raise IOError(msg)
357        return page(info)
[710]358
[1512]359    def get_spectrum(self, rowno):
[1471]360        """Return the spectrum for the current row in the scantable as a list.
[1846]361
[1471]362        Parameters:
[1846]363
[1573]364             rowno:   the row number to retrieve the spectrum from
[1846]365
[1471]366        """
367        return self._getspectrum(rowno)
[946]368
[1471]369    def get_mask(self, rowno):
370        """Return the mask for the current row in the scantable as a list.
[1846]371
[1471]372        Parameters:
[1846]373
[1573]374             rowno:   the row number to retrieve the mask from
[1846]375
[1471]376        """
377        return self._getmask(rowno)
378
379    def set_spectrum(self, spec, rowno):
[1938]380        """Set the spectrum for the current row in the scantable.
[1846]381
[1471]382        Parameters:
[1846]383
[1855]384             spec:   the new spectrum
[1846]385
[1855]386             rowno:  the row number to set the spectrum for
387
[1471]388        """
389        assert(len(spec) == self.nchan())
390        return self._setspectrum(spec, rowno)
391
[1600]392    def get_coordinate(self, rowno):
393        """Return the (spectral) coordinate for a a given 'rowno'.
[1846]394
395        *Note*:
396
[1600]397            * This coordinate is only valid until a scantable method modifies
398              the frequency axis.
399            * This coordinate does contain the original frequency set-up
400              NOT the new frame. The conversions however are done using the user
401              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
402              use scantable.freq_align first. Without it there is no closure,
[1846]403              i.e.::
[1600]404
[1846]405                  c = myscan.get_coordinate(0)
406                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
407
[1600]408        Parameters:
[1846]409
[1600]410             rowno:    the row number for the spectral coordinate
411
412        """
413        return coordinate(Scantable.get_coordinate(self, rowno))
414
[946]415    def get_selection(self):
[1846]416        """\
[1005]417        Get the selection object currently set on this scantable.
[1846]418
419        Example::
420
[1005]421            sel = scan.get_selection()
422            sel.set_ifs(0)              # select IF 0
423            scan.set_selection(sel)     # apply modified selection
[1846]424
[946]425        """
426        return selector(self._getselection())
427
[1576]428    def set_selection(self, selection=None, **kw):
[1846]429        """\
[1005]430        Select a subset of the data. All following operations on this scantable
431        are only applied to thi selection.
[1846]432
[1005]433        Parameters:
[1697]434
[1846]435            selection:    a selector object (default unset the selection), or
436                          any combination of "pols", "ifs", "beams", "scans",
437                          "cycles", "name", "query"
[1697]438
[1846]439        Examples::
[1697]440
[1005]441            sel = selector()         # create a selection object
[1118]442            self.set_scans([0, 3])    # select SCANNO 0 and 3
[1005]443            scan.set_selection(sel)  # set the selection
444            scan.summary()           # will only print summary of scanno 0 an 3
445            scan.set_selection()     # unset the selection
[1697]446            # or the equivalent
447            scan.set_selection(scans=[0,3])
448            scan.summary()           # will only print summary of scanno 0 an 3
449            scan.set_selection()     # unset the selection
[1846]450
[946]451        """
[1576]452        if selection is None:
453            # reset
454            if len(kw) == 0:
455                selection = selector()
456            else:
457                # try keywords
458                for k in kw:
459                    if k not in selector.fields:
460                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
461                selection = selector(**kw)
[946]462        self._setselection(selection)
463
[1819]464    def get_row(self, row=0, insitu=None):
[1846]465        """\
[1819]466        Select a row in the scantable.
467        Return a scantable with single row.
[1846]468
[1819]469        Parameters:
[1846]470
471            row:    row no of integration, default is 0.
472            insitu: if False a new scantable is returned. Otherwise, the
473                    scaling is done in-situ. The default is taken from .asaprc
474                    (False)
475
[1819]476        """
477        if insitu is None: insitu = rcParams['insitu']
478        if not insitu:
479            workscan = self.copy()
480        else:
481            workscan = self
482        # Select a row
483        sel=selector()
[1992]484        sel.set_rows([row])
485        #sel.set_scans([workscan.getscan(row)])
486        #sel.set_cycles([workscan.getcycle(row)])
487        #sel.set_beams([workscan.getbeam(row)])
488        #sel.set_ifs([workscan.getif(row)])
489        #sel.set_polarisations([workscan.getpol(row)])
490        #sel.set_name(workscan._getsourcename(row))
[1819]491        workscan.set_selection(sel)
492        if not workscan.nrow() == 1:
493            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
494            raise RuntimeError(msg)
495        del sel
496        if insitu:
497            self._assign(workscan)
498        else:
499            return workscan
500
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' statistics return the abscissa (channel/freq/vel) of the
        # extremum instead of the statistic value itself
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max also need the channel at which the extremum occurs
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # either all rows or just the single requested one
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # collect the abscissa values as the returned statistics
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes with more than one element
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
            out +=  sep+"\n"

        # NOTE(review): 'statunit' is assigned inside the loop but used
        # below; with an empty selection (zero rows) this would raise a
        # NameError — presumably never hit in practice; verify.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        # route the report through a temp file so it reaches the asap log
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
608
609    def chan2data(self, rowno=0, chan=0):
[1846]610        """\
[1819]611        Returns channel/frequency/velocity and spectral value
612        at an arbitrary row and channel in the scantable.
[1846]613
[1819]614        Parameters:
[1846]615
[1819]616            rowno:   a row number in the scantable. Default is the
617                     first row, i.e. rowno=0
[1855]618
[1819]619            chan:    a channel in the scantable. Default is the first
620                     channel, i.e. pos=0
[1846]621
[1819]622        """
623        if isinstance(rowno, int) and isinstance(chan, int):
624            qx = {'unit': self.get_unit(),
625                  'value': self._getabcissa(rowno)[chan]}
626            qy = {'unit': self.get_fluxunit(),
627                  'value': self._getspectrum(rowno)[chan]}
628            return qx, qy
629
[1118]630    def stddev(self, mask=None):
[1846]631        """\
[135]632        Determine the standard deviation of the current beam/if/pol
633        Takes a 'mask' as an optional parameter to specify which
634        channels should be excluded.
[1846]635
[135]636        Parameters:
[1846]637
[135]638            mask:    an optional mask specifying where the standard
639                     deviation should be determined.
640
[1846]641        Example::
642
[135]643            scan.set_unit('channel')
[1118]644            msk = scan.create_mask([100, 200], [500, 600])
[135]645            scan.stddev(mask=m)
[1846]646
[135]647        """
[1118]648        return self.stats(stat='stddev', mask=mask);
[135]649
[1003]650
[1259]651    def get_column_names(self):
[1846]652        """\
[1003]653        Return a  list of column names, which can be used for selection.
654        """
[1259]655        return list(Scantable.get_column_names(self))
[1003]656
[1730]657    def get_tsys(self, row=-1):
[1846]658        """\
[113]659        Return the System temperatures.
[1846]660
661        Parameters:
662
663            row:    the rowno to get the information for. (default all rows)
664
[113]665        Returns:
[1846]666
[876]667            a list of Tsys values for the current selection
[1846]668
[113]669        """
[1730]670        if row > -1:
671            return self._get_column(self._gettsys, row)
[876]672        return self._row_callback(self._gettsys, "Tsys")
[256]673
[1730]674
675    def get_weather(self, row=-1):
[1846]676        """\
677        Return the weather informations.
678
679        Parameters:
680
681            row:    the rowno to get the information for. (default all rows)
682
683        Returns:
684
685            a dict or list of of dicts of values for the current selection
686
687        """
688
[1730]689        values = self._get_column(self._get_weather, row)
690        if row > -1:
691            return {'temperature': values[0],
692                    'pressure': values[1], 'humidity' : values[2],
693                    'windspeed' : values[3], 'windaz' : values[4]
694                    }
695        else:
696            out = []
697            for r in values:
698
699                out.append({'temperature': r[0],
700                            'pressure': r[1], 'humidity' : r[2],
701                            'windspeed' : r[3], 'windaz' : r[4]
702                    })
703            return out
704
    def _row_callback(self, callback, label):
        # Apply *callback* to every row, building a human-readable report
        # (pushed to the asap log under *label*) and returning the
        # per-row values as a list.
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention axes that have more than one element
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'

        asaplog.push(sep)
        asaplog.push(" %s" % (label))
        asaplog.push(sep)
        asaplog.push(out)
        asaplog.post()
        return outvec
[256]728
[1947]729    def _get_column(self, callback, row=-1, *args):
[1070]730        """
731        """
732        if row == -1:
[1947]733            return [callback(i, *args) for i in range(self.nrow())]
[1070]734        else:
[1819]735            if  0 <= row < self.nrow():
[1947]736                return callback(row, *args)
[256]737
[1070]738
    def get_time(self, row=-1, asdatetime=False, prec=-1):
        """\
        Get a list of time stamps for the observations.
        Return a datetime object or a string (default) for each
        integration time stamp in the scantable.

        Parameters:

            row:          row no of integration. Default -1 return all rows

            asdatetime:   return values as datetime objects rather than strings

            prec:         number of digits shown. Default -1 to automatic calculation.
                          Note this number is equals to the digits of MVTime,
                          i.e., 0<prec<3: dates with hh:: only,
                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
                          and 6> : with hh:mm:ss.tt... (prec-6 t's added)

        """
        from datetime import datetime
        if prec < 0:
            # automagically set necessary precision +1
            # (shortest integration time determines how many sub-second
            # digits are needed to keep time stamps distinct)
            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
            prec = max(6, int(prec))
        else:
            prec = max(0, prec)
        if asdatetime:
            #precision can be 1 millisecond at max
            prec = min(12, prec)

        times = self._get_column(self._gettime, row, prec)
        if not asdatetime:
            return times
        # Trim the strptime format to match the precision of the returned
        # strings: drop fractional seconds, then seconds, then minutes.
        # NOTE(review): '(6-prec)/2' relies on Python 2 integer division;
        # under Python 3 this would produce a float and break the slicing
        # arithmetic — confirm before any migration.
        format = "%Y/%m/%d/%H:%M:%S.%f"
        if prec < 7:
            nsub = 1 + (((6-prec)/2) % 3)
            substr = [".%f","%S","%M"]
            for i in range(nsub):
                format = format.replace(substr[i],"")
        if isinstance(times, list):
            return [datetime.strptime(i, format) for i in times]
        else:
            return datetime.strptime(times, format)
[102]782
[1348]783
784    def get_inttime(self, row=-1):
[1846]785        """\
[1348]786        Get a list of integration times for the observations.
787        Return a time in seconds for each integration in the scantable.
[1846]788
[1348]789        Parameters:
[1846]790
[1348]791            row:    row no of integration. Default -1 return all rows.
[1846]792
[1348]793        """
[1573]794        return self._get_column(self._getinttime, row)
[1348]795
[1573]796
[714]797    def get_sourcename(self, row=-1):
[1846]798        """\
[794]799        Get a list source names for the observations.
[714]800        Return a string for each integration in the scantable.
801        Parameters:
[1846]802
[1348]803            row:    row no of integration. Default -1 return all rows.
[1846]804
[714]805        """
[1070]806        return self._get_column(self._getsourcename, row)
[714]807
[794]808    def get_elevation(self, row=-1):
[1846]809        """\
[794]810        Get a list of elevations for the observations.
811        Return a float for each integration in the scantable.
[1846]812
[794]813        Parameters:
[1846]814
[1348]815            row:    row no of integration. Default -1 return all rows.
[1846]816
[794]817        """
[1070]818        return self._get_column(self._getelevation, row)
[794]819
820    def get_azimuth(self, row=-1):
[1846]821        """\
[794]822        Get a list of azimuths for the observations.
823        Return a float for each integration in the scantable.
[1846]824
[794]825        Parameters:
[1348]826            row:    row no of integration. Default -1 return all rows.
[1846]827
[794]828        """
[1070]829        return self._get_column(self._getazimuth, row)
[794]830
831    def get_parangle(self, row=-1):
[1846]832        """\
[794]833        Get a list of parallactic angles for the observations.
834        Return a float for each integration in the scantable.
[1846]835
[794]836        Parameters:
[1846]837
[1348]838            row:    row no of integration. Default -1 return all rows.
[1846]839
[794]840        """
[1070]841        return self._get_column(self._getparangle, row)
[794]842
[1070]843    def get_direction(self, row=-1):
844        """
845        Get a list of Positions on the sky (direction) for the observations.
[1594]846        Return a string for each integration in the scantable.
[1855]847
[1070]848        Parameters:
[1855]849
[1070]850            row:    row no of integration. Default -1 return all rows
[1855]851
[1070]852        """
853        return self._get_column(self._getdirection, row)
854
[1391]855    def get_directionval(self, row=-1):
[1846]856        """\
[1391]857        Get a list of Positions on the sky (direction) for the observations.
858        Return a float for each integration in the scantable.
[1846]859
[1391]860        Parameters:
[1846]861
[1391]862            row:    row no of integration. Default -1 return all rows
[1846]863
[1391]864        """
865        return self._get_column(self._getdirectionvec, row)
866
[1862]867    @asaplog_post_dec
[102]868    def set_unit(self, unit='channel'):
[1846]869        """\
[102]870        Set the unit for all following operations on this scantable
[1846]871
[102]872        Parameters:
[1846]873
874            unit:    optional unit, default is 'channel'. Use one of '*Hz',
875                     'km/s', 'channel' or equivalent ''
876
[102]877        """
[484]878        varlist = vars()
[1118]879        if unit in ['', 'pixel', 'channel']:
[113]880            unit = ''
881        inf = list(self._getcoordinfo())
882        inf[0] = unit
883        self._setcoordinfo(inf)
[1118]884        self._add_history("set_unit", varlist)
[113]885
[1862]886    @asaplog_post_dec
[484]887    def set_instrument(self, instr):
[1846]888        """\
[1348]889        Set the instrument for subsequent processing.
[1846]890
[358]891        Parameters:
[1846]892
[710]893            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]894                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[1846]895
[358]896        """
897        self._setInstrument(instr)
[1118]898        self._add_history("set_instument", vars())
[358]899
[1862]900    @asaplog_post_dec
[1190]901    def set_feedtype(self, feedtype):
[1846]902        """\
[1190]903        Overwrite the feed type, which might not be set correctly.
[1846]904
[1190]905        Parameters:
[1846]906
[1190]907            feedtype:     'linear' or 'circular'
[1846]908
[1190]909        """
910        self._setfeedtype(feedtype)
911        self._add_history("set_feedtype", vars())
912
[1862]913    @asaplog_post_dec
[276]914    def set_doppler(self, doppler='RADIO'):
[1846]915        """\
[276]916        Set the doppler for all following operations on this scantable.
[1846]917
[276]918        Parameters:
[1846]919
[276]920            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
[1846]921
[276]922        """
[484]923        varlist = vars()
[276]924        inf = list(self._getcoordinfo())
925        inf[2] = doppler
926        self._setcoordinfo(inf)
[1118]927        self._add_history("set_doppler", vars())
[710]928
[1862]929    @asaplog_post_dec
[226]930    def set_freqframe(self, frame=None):
[1846]931        """\
[113]932        Set the frame type of the Spectral Axis.
[1846]933
[113]934        Parameters:
[1846]935
[591]936            frame:   an optional frame type, default 'LSRK'. Valid frames are:
[1819]937                     'TOPO', 'LSRD', 'LSRK', 'BARY',
[1118]938                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
[1846]939
940        Example::
941
[113]942            scan.set_freqframe('BARY')
[1846]943
[113]944        """
[1593]945        frame = frame or rcParams['scantable.freqframe']
[484]946        varlist = vars()
[1819]947        # "REST" is not implemented in casacore
948        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
949        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
950        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
[1118]951                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]952
[989]953        if frame in valid:
[113]954            inf = list(self._getcoordinfo())
955            inf[1] = frame
956            self._setcoordinfo(inf)
[1118]957            self._add_history("set_freqframe", varlist)
[102]958        else:
[1118]959            msg  = "Please specify a valid freq type. Valid types are:\n", valid
[1859]960            raise TypeError(msg)
[710]961
[1862]962    @asaplog_post_dec
[989]963    def set_dirframe(self, frame=""):
[1846]964        """\
[989]965        Set the frame type of the Direction on the sky.
[1846]966
[989]967        Parameters:
[1846]968
[989]969            frame:   an optional frame type, default ''. Valid frames are:
970                     'J2000', 'B1950', 'GALACTIC'
[1846]971
972        Example:
973
[989]974            scan.set_dirframe('GALACTIC')
[1846]975
[989]976        """
977        varlist = vars()
[1859]978        Scantable.set_dirframe(self, frame)
[1118]979        self._add_history("set_dirframe", varlist)
[989]980
[113]981    def get_unit(self):
[1846]982        """\
[113]983        Get the default unit set in this scantable
[1846]984
[113]985        Returns:
[1846]986
[113]987            A unit string
[1846]988
[113]989        """
990        inf = self._getcoordinfo()
991        unit = inf[0]
992        if unit == '': unit = 'channel'
993        return unit
[102]994
[1862]995    @asaplog_post_dec
[158]996    def get_abcissa(self, rowno=0):
[1846]997        """\
[158]998        Get the abcissa in the current coordinate setup for the currently
[113]999        selected Beam/IF/Pol
[1846]1000
[113]1001        Parameters:
[1846]1002
[226]1003            rowno:    an optional row number in the scantable. Default is the
1004                      first row, i.e. rowno=0
[1846]1005
[113]1006        Returns:
[1846]1007
[1348]1008            The abcissa values and the format string (as a dictionary)
[1846]1009
[113]1010        """
[256]1011        abc = self._getabcissa(rowno)
[710]1012        lbl = self._getabcissalabel(rowno)
[158]1013        return abc, lbl
[113]1014
[1862]1015    @asaplog_post_dec
[1819]1016    def flag(self, mask=None, unflag=False):
[1846]1017        """\
[1001]1018        Flag the selected data using an optional channel mask.
[1846]1019
[1001]1020        Parameters:
[1846]1021
[1001]1022            mask:   an optional channel mask, created with create_mask. Default
1023                    (no mask) is all channels.
[1855]1024
[1819]1025            unflag:    if True, unflag the data
[1846]1026
[1001]1027        """
1028        varlist = vars()
[1593]1029        mask = mask or []
[1859]1030        self._flag(mask, unflag)
[1001]1031        self._add_history("flag", varlist)
1032
[1862]1033    @asaplog_post_dec
[1819]1034    def flag_row(self, rows=[], unflag=False):
[1846]1035        """\
[1819]1036        Flag the selected data in row-based manner.
[1846]1037
[1819]1038        Parameters:
[1846]1039
[1843]1040            rows:   list of row numbers to be flagged. Default is no row
1041                    (must be explicitly specified to execute row-based flagging).
[1855]1042
[1819]1043            unflag: if True, unflag the data.
[1846]1044
[1819]1045        """
1046        varlist = vars()
[1859]1047        self._flag_row(rows, unflag)
[1819]1048        self._add_history("flag_row", varlist)
1049
[1862]1050    @asaplog_post_dec
[1819]1051    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
[1846]1052        """\
[1819]1053        Flag the selected data outside a specified range (in channel-base)
[1846]1054
[1819]1055        Parameters:
[1846]1056
[1819]1057            uthres:      upper threshold.
[1855]1058
[1819]1059            dthres:      lower threshold
[1846]1060
[1819]1061            clipoutside: True for flagging data outside the range [dthres:uthres].
[1928]1062                         False for flagging data inside the range.
[1855]1063
[1846]1064            unflag:      if True, unflag the data.
1065
[1819]1066        """
1067        varlist = vars()
[1859]1068        self._clip(uthres, dthres, clipoutside, unflag)
[1819]1069        self._add_history("clip", varlist)
1070
[1862]1071    @asaplog_post_dec
[1584]1072    def lag_flag(self, start, end, unit="MHz", insitu=None):
[1846]1073        """\
[1192]1074        Flag the data in 'lag' space by providing a frequency to remove.
[1584]1075        Flagged data in the scantable gets interpolated over the region.
[1192]1076        No taper is applied.
[1846]1077
[1192]1078        Parameters:
[1846]1079
[1579]1080            start:    the start frequency (really a period within the
1081                      bandwidth)  or period to remove
[1855]1082
[1579]1083            end:      the end frequency or period to remove
[1855]1084
[1584]1085            unit:     the frequency unit (default "MHz") or "" for
[1579]1086                      explicit lag channels
[1846]1087
1088        *Notes*:
1089
[1579]1090            It is recommended to flag edges of the band or strong
[1348]1091            signals beforehand.
[1846]1092
[1192]1093        """
1094        if insitu is None: insitu = rcParams['insitu']
1095        self._math._setinsitu(insitu)
1096        varlist = vars()
[1579]1097        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1098        if not (unit == "" or base.has_key(unit)):
[1192]1099            raise ValueError("%s is not a valid unit." % unit)
[1859]1100        if unit == "":
1101            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1102        else:
1103            s = scantable(self._math._lag_flag(self, start*base[unit],
1104                                               end*base[unit], "frequency"))
[1192]1105        s._add_history("lag_flag", varlist)
1106        if insitu:
1107            self._assign(s)
1108        else:
1109            return s
[1001]1110
[1862]1111    @asaplog_post_dec
[113]1112    def create_mask(self, *args, **kwargs):
[1846]1113        """\
[1118]1114        Compute and return a mask based on [min, max] windows.
[189]1115        The specified windows are to be INCLUDED, when the mask is
[113]1116        applied.
[1846]1117
[102]1118        Parameters:
[1846]1119
[1118]1120            [min, max], [min2, max2], ...
[1024]1121                Pairs of start/end points (inclusive)specifying the regions
[102]1122                to be masked
[1855]1123
[189]1124            invert:     optional argument. If specified as True,
1125                        return an inverted mask, i.e. the regions
1126                        specified are EXCLUDED
[1855]1127
[513]1128            row:        create the mask using the specified row for
1129                        unit conversions, default is row=0
1130                        only necessary if frequency varies over rows.
[1846]1131
1132        Examples::
1133
[113]1134            scan.set_unit('channel')
[1846]1135            # a)
[1118]1136            msk = scan.create_mask([400, 500], [800, 900])
[189]1137            # masks everything outside 400 and 500
[113]1138            # and 800 and 900 in the unit 'channel'
1139
[1846]1140            # b)
[1118]1141            msk = scan.create_mask([400, 500], [800, 900], invert=True)
[189]1142            # masks the regions between 400 and 500
[113]1143            # and 800 and 900 in the unit 'channel'
[1846]1144
1145            # c)
1146            #mask only channel 400
[1554]1147            msk =  scan.create_mask([400])
[1846]1148
[102]1149        """
[1554]1150        row = kwargs.get("row", 0)
[513]1151        data = self._getabcissa(row)
[113]1152        u = self._getcoordinfo()[0]
[1859]1153        if u == "":
1154            u = "channel"
1155        msg = "The current mask window unit is %s" % u
1156        i = self._check_ifs()
1157        if not i:
1158            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1159        asaplog.push(msg)
[102]1160        n = self.nchan()
[1295]1161        msk = _n_bools(n, False)
[710]1162        # test if args is a 'list' or a 'normal *args - UGLY!!!
1163
[1118]1164        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1165             and args or args[0]
[710]1166        for window in ws:
[1554]1167            if len(window) == 1:
1168                window = [window[0], window[0]]
1169            if len(window) == 0 or  len(window) > 2:
1170                raise ValueError("A window needs to be defined as [start(, end)]")
[1545]1171            if window[0] > window[1]:
1172                tmp = window[0]
1173                window[0] = window[1]
1174                window[1] = tmp
[102]1175            for i in range(n):
[1024]1176                if data[i] >= window[0] and data[i] <= window[1]:
[1295]1177                    msk[i] = True
[113]1178        if kwargs.has_key('invert'):
1179            if kwargs.get('invert'):
[1295]1180                msk = mask_not(msk)
[102]1181        return msk
[710]1182
[1931]1183    def get_masklist(self, mask=None, row=0, silent=False):
[1846]1184        """\
[1819]1185        Compute and return a list of mask windows, [min, max].
[1846]1186
[1819]1187        Parameters:
[1846]1188
[1819]1189            mask:       channel mask, created with create_mask.
[1855]1190
[1819]1191            row:        calcutate the masklist using the specified row
1192                        for unit conversions, default is row=0
1193                        only necessary if frequency varies over rows.
[1846]1194
[1819]1195        Returns:
[1846]1196
[1819]1197            [min, max], [min2, max2], ...
1198                Pairs of start/end points (inclusive)specifying
1199                the masked regions
[1846]1200
[1819]1201        """
1202        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1203            raise TypeError("The mask should be list or tuple.")
1204        if len(mask) < 2:
1205            raise TypeError("The mask elements should be > 1")
1206        if self.nchan() != len(mask):
1207            msg = "Number of channels in scantable != number of mask elements"
1208            raise TypeError(msg)
1209        data = self._getabcissa(row)
1210        u = self._getcoordinfo()[0]
[1859]1211        if u == "":
1212            u = "channel"
1213        msg = "The current mask window unit is %s" % u
1214        i = self._check_ifs()
1215        if not i:
1216            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
[1931]1217        if not silent:
1218            asaplog.push(msg)
[1819]1219        masklist=[]
1220        ist, ien = None, None
1221        ist, ien=self.get_mask_indices(mask)
1222        if ist is not None and ien is not None:
1223            for i in xrange(len(ist)):
1224                range=[data[ist[i]],data[ien[i]]]
1225                range.sort()
1226                masklist.append([range[0],range[1]])
1227        return masklist
1228
1229    def get_mask_indices(self, mask=None):
[1846]1230        """\
[1819]1231        Compute and Return lists of mask start indices and mask end indices.
[1855]1232
1233        Parameters:
1234
[1819]1235            mask:       channel mask, created with create_mask.
[1846]1236
[1819]1237        Returns:
[1846]1238
[1819]1239            List of mask start indices and that of mask end indices,
1240            i.e., [istart1,istart2,....], [iend1,iend2,....].
[1846]1241
[1819]1242        """
1243        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1244            raise TypeError("The mask should be list or tuple.")
1245        if len(mask) < 2:
1246            raise TypeError("The mask elements should be > 1")
1247        istart=[]
1248        iend=[]
1249        if mask[0]: istart.append(0)
1250        for i in range(len(mask)-1):
1251            if not mask[i] and mask[i+1]:
1252                istart.append(i+1)
1253            elif mask[i] and not mask[i+1]:
1254                iend.append(i)
1255        if mask[len(mask)-1]: iend.append(len(mask)-1)
1256        if len(istart) != len(iend):
1257            raise RuntimeError("Numbers of mask start != mask end.")
1258        for i in range(len(istart)):
1259            if istart[i] > iend[i]:
1260                raise RuntimeError("Mask start index > mask end index")
1261                break
1262        return istart,iend
1263
1264#    def get_restfreqs(self):
1265#        """
1266#        Get the restfrequency(s) stored in this scantable.
1267#        The return value(s) are always of unit 'Hz'
1268#        Parameters:
1269#            none
1270#        Returns:
1271#            a list of doubles
1272#        """
1273#        return list(self._getrestfreqs())
1274
1275    def get_restfreqs(self, ids=None):
[1846]1276        """\
[256]1277        Get the restfrequency(s) stored in this scantable.
1278        The return value(s) are always of unit 'Hz'
[1846]1279
[256]1280        Parameters:
[1846]1281
[1819]1282            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1283                 be retrieved
[1846]1284
[256]1285        Returns:
[1846]1286
[1819]1287            dictionary containing ids and a list of doubles for each id
[1846]1288
[256]1289        """
[1819]1290        if ids is None:
1291            rfreqs={}
1292            idlist = self.getmolnos()
1293            for i in idlist:
1294                rfreqs[i]=list(self._getrestfreqs(i))
1295            return rfreqs
1296        else:
1297            if type(ids)==list or type(ids)==tuple:
1298                rfreqs={}
1299                for i in ids:
1300                    rfreqs[i]=list(self._getrestfreqs(i))
1301                return rfreqs
1302            else:
1303                return list(self._getrestfreqs(ids))
1304            #return list(self._getrestfreqs(ids))
[102]1305
[931]1306    def set_restfreqs(self, freqs=None, unit='Hz'):
[1846]1307        """\
[931]1308        Set or replace the restfrequency specified and
[1938]1309        if the 'freqs' argument holds a scalar,
[931]1310        then that rest frequency will be applied to all the selected
1311        data.  If the 'freqs' argument holds
1312        a vector, then it MUST be of equal or smaller length than
1313        the number of IFs (and the available restfrequencies will be
1314        replaced by this vector).  In this case, *all* data have
1315        the restfrequency set per IF according
1316        to the corresponding value you give in the 'freqs' vector.
[1118]1317        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
[931]1318        IF 1 gets restfreq 2e9.
[1846]1319
[1395]1320        You can also specify the frequencies via a linecatalog.
[1153]1321
[931]1322        Parameters:
[1846]1323
[931]1324            freqs:   list of rest frequency values or string idenitfiers
[1855]1325
[931]1326            unit:    unit for rest frequency (default 'Hz')
[402]1327
[1846]1328
1329        Example::
1330
[1819]1331            # set the given restfrequency for the all currently selected IFs
[931]1332            scan.set_restfreqs(freqs=1.4e9)
[1845]1333            # set restfrequencies for the n IFs  (n > 1) in the order of the
1334            # list, i.e
1335            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1336            # len(list_of_restfreqs) == nIF
1337            # for nIF == 1 the following will set multiple restfrequency for
1338            # that IF
[1819]1339            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
[1845]1340            # set multiple restfrequencies per IF. as a list of lists where
1341            # the outer list has nIF elements, the inner s arbitrary
1342            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
[391]1343
[1846]1344       *Note*:
[1845]1345
[931]1346            To do more sophisticate Restfrequency setting, e.g. on a
1347            source and IF basis, use scantable.set_selection() before using
[1846]1348            this function::
[931]1349
[1846]1350                # provided your scantable is called scan
1351                selection = selector()
1352                selection.set_name("ORION*")
1353                selection.set_ifs([1])
1354                scan.set_selection(selection)
1355                scan.set_restfreqs(freqs=86.6e9)
1356
[931]1357        """
1358        varlist = vars()
[1157]1359        from asap import linecatalog
1360        # simple  value
[1118]1361        if isinstance(freqs, int) or isinstance(freqs, float):
[1845]1362            self._setrestfreqs([freqs], [""], unit)
[1157]1363        # list of values
[1118]1364        elif isinstance(freqs, list) or isinstance(freqs, tuple):
[1157]1365            # list values are scalars
[1118]1366            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
[1845]1367                if len(freqs) == 1:
1368                    self._setrestfreqs(freqs, [""], unit)
1369                else:
1370                    # allow the 'old' mode of setting mulitple IFs
1371                    sel = selector()
1372                    savesel = self._getselection()
1373                    iflist = self.getifnos()
1374                    if len(freqs)>len(iflist):
1375                        raise ValueError("number of elements in list of list "
1376                                         "exeeds the current IF selections")
1377                    iflist = self.getifnos()
1378                    for i, fval in enumerate(freqs):
1379                        sel.set_ifs(iflist[i])
1380                        self._setselection(sel)
1381                        self._setrestfreqs([fval], [""], unit)
1382                    self._setselection(savesel)
1383
1384            # list values are dict, {'value'=, 'name'=)
[1157]1385            elif isinstance(freqs[-1], dict):
[1845]1386                values = []
1387                names = []
1388                for d in freqs:
1389                    values.append(d["value"])
1390                    names.append(d["name"])
1391                self._setrestfreqs(values, names, unit)
[1819]1392            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
[1157]1393                sel = selector()
1394                savesel = self._getselection()
[1322]1395                iflist = self.getifnos()
[1819]1396                if len(freqs)>len(iflist):
[1845]1397                    raise ValueError("number of elements in list of list exeeds"
1398                                     " the current IF selections")
1399                for i, fval in enumerate(freqs):
[1322]1400                    sel.set_ifs(iflist[i])
[1259]1401                    self._setselection(sel)
[1845]1402                    self._setrestfreqs(fval, [""], unit)
[1157]1403                self._setselection(savesel)
1404        # freqs are to be taken from a linecatalog
[1153]1405        elif isinstance(freqs, linecatalog):
1406            sel = selector()
1407            savesel = self._getselection()
1408            for i in xrange(freqs.nrow()):
[1322]1409                sel.set_ifs(iflist[i])
[1153]1410                self._setselection(sel)
[1845]1411                self._setrestfreqs([freqs.get_frequency(i)],
1412                                   [freqs.get_name(i)], "MHz")
[1153]1413                # ensure that we are not iterating past nIF
1414                if i == self.nif()-1: break
1415            self._setselection(savesel)
[931]1416        else:
1417            return
1418        self._add_history("set_restfreqs", varlist)
1419
[1360]1420    def shift_refpix(self, delta):
[1846]1421        """\
[1589]1422        Shift the reference pixel of the Spectra Coordinate by an
1423        integer amount.
[1846]1424
[1589]1425        Parameters:
[1846]1426
[1589]1427            delta:   the amount to shift by
[1846]1428
1429        *Note*:
1430
[1589]1431            Be careful using this with broadband data.
[1846]1432
[1360]1433        """
[1731]1434        Scantable.shift_refpix(self, delta)
[931]1435
[1862]1436    @asaplog_post_dec
[1259]1437    def history(self, filename=None):
[1846]1438        """\
[1259]1439        Print the history. Optionally to a file.
[1846]1440
[1348]1441        Parameters:
[1846]1442
[1928]1443            filename:    The name of the file to save the history to.
[1846]1444
[1259]1445        """
[484]1446        hist = list(self._gethistory())
[794]1447        out = "-"*80
[484]1448        for h in hist:
[489]1449            if h.startswith("---"):
[1857]1450                out = "\n".join([out, h])
[489]1451            else:
1452                items = h.split("##")
1453                date = items[0]
1454                func = items[1]
1455                items = items[2:]
[794]1456                out += "\n"+date+"\n"
1457                out += "Function: %s\n  Parameters:" % (func)
[489]1458                for i in items:
[1938]1459                    if i == '':
1460                        continue
[489]1461                    s = i.split("=")
[1118]1462                    out += "\n   %s = %s" % (s[0], s[1])
[1857]1463                out = "\n".join([out, "-"*80])
[1259]1464        if filename is not None:
1465            if filename is "":
1466                filename = 'scantable_history.txt'
1467            import os
1468            filename = os.path.expandvars(os.path.expanduser(filename))
1469            if not os.path.isdir(filename):
1470                data = open(filename, 'w')
1471                data.write(out)
1472                data.close()
1473            else:
1474                msg = "Illegal file name '%s'." % (filename)
[1859]1475                raise IOError(msg)
1476        return page(out)
[513]1477    #
1478    # Maths business
1479    #
[1862]1480    @asaplog_post_dec
[931]1481    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
[1846]1482        """\
[1070]1483        Return the (time) weighted average of a scan.
[1846]1484
1485        *Note*:
1486
[1070]1487            in channels only - align if necessary
[1846]1488
[513]1489        Parameters:
[1846]1490
[513]1491            mask:     an optional mask (only used for 'var' and 'tsys'
1492                      weighting)
[1855]1493
[558]1494            scanav:   True averages each scan separately
1495                      False (default) averages all scans together,
[1855]1496
[1099]1497            weight:   Weighting scheme.
1498                      'none'     (mean no weight)
1499                      'var'      (1/var(spec) weighted)
1500                      'tsys'     (1/Tsys**2 weighted)
1501                      'tint'     (integration time weighted)
1502                      'tintsys'  (Tint/Tsys**2)
1503                      'median'   ( median averaging)
[535]1504                      The default is 'tint'
[1855]1505
[931]1506            align:    align the spectra in velocity before averaging. It takes
1507                      the time of the first spectrum as reference time.
[1846]1508
1509        Example::
1510
[513]1511            # time average the scantable without using a mask
[710]1512            newscan = scan.average_time()
[1846]1513
[513]1514        """
1515        varlist = vars()
[1593]1516        weight = weight or 'TINT'
1517        mask = mask or ()
1518        scanav = (scanav and 'SCAN') or 'NONE'
[1118]1519        scan = (self, )
[1859]1520
1521        if align:
1522            scan = (self.freq_align(insitu=False), )
1523        s = None
1524        if weight.upper() == 'MEDIAN':
1525            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1526                                                     scanav))
1527        else:
1528            s = scantable(self._math._average(scan, mask, weight.upper(),
1529                          scanav))
[1099]1530        s._add_history("average_time", varlist)
[513]1531        return s
[710]1532
[1862]1533    @asaplog_post_dec
[876]1534    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[1846]1535        """\
[513]1536        Return a scan where all spectra are converted to either
1537        Jansky or Kelvin depending upon the flux units of the scan table.
1538        By default the function tries to look the values up internally.
1539        If it can't find them (or if you want to over-ride), you must
1540        specify EITHER jyperk OR eta (and D which it will try to look up
1541        also if you don't set it). jyperk takes precedence if you set both.
[1846]1542
[513]1543        Parameters:
[1846]1544
[513]1545            jyperk:      the Jy / K conversion factor
[1855]1546
[513]1547            eta:         the aperture efficiency
[1855]1548
[1928]1549            d:           the geometric diameter (metres)
[1855]1550
[513]1551            insitu:      if False a new scantable is returned.
1552                         Otherwise, the scaling is done in-situ
1553                         The default is taken from .asaprc (False)
[1846]1554
[513]1555        """
1556        if insitu is None: insitu = rcParams['insitu']
[876]1557        self._math._setinsitu(insitu)
[513]1558        varlist = vars()
[1593]1559        jyperk = jyperk or -1.0
1560        d = d or -1.0
1561        eta = eta or -1.0
[876]1562        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1563        s._add_history("convert_flux", varlist)
1564        if insitu: self._assign(s)
1565        else: return s
[513]1566
[1862]1567    @asaplog_post_dec
[876]1568    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[1846]1569        """\
[513]1570        Return a scan after applying a gain-elevation correction.
1571        The correction can be made via either a polynomial or a
1572        table-based interpolation (and extrapolation if necessary).
1573        You specify polynomial coefficients, an ascii table or neither.
1574        If you specify neither, then a polynomial correction will be made
1575        with built in coefficients known for certain telescopes (an error
1576        will occur if the instrument is not known).
1577        The data and Tsys are *divided* by the scaling factors.
[1846]1578
[513]1579        Parameters:
[1846]1580
[513]1581            poly:        Polynomial coefficients (default None) to compute a
1582                         gain-elevation correction as a function of
1583                         elevation (in degrees).
[1855]1584
[513]1585            filename:    The name of an ascii file holding correction factors.
1586                         The first row of the ascii file must give the column
1587                         names and these MUST include columns
1588                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1589                         by this) somewhere.
1590                         The second row must give the data type of the
1591                         column. Use 'R' for Real and 'I' for Integer.
1592                         An example file would be
1593                         (actual factors are arbitrary) :
1594
1595                         TIME ELEVATION FACTOR
1596                         R R R
1597                         0.1 0 0.8
1598                         0.2 20 0.85
1599                         0.3 40 0.9
1600                         0.4 60 0.85
1601                         0.5 80 0.8
1602                         0.6 90 0.75
[1855]1603
[513]1604            method:      Interpolation method when correcting from a table.
1605                         Values are  "nearest", "linear" (default), "cubic"
1606                         and "spline"
[1855]1607
[513]1608            insitu:      if False a new scantable is returned.
1609                         Otherwise, the scaling is done in-situ
1610                         The default is taken from .asaprc (False)
[1846]1611
[513]1612        """
1613
1614        if insitu is None: insitu = rcParams['insitu']
[876]1615        self._math._setinsitu(insitu)
[513]1616        varlist = vars()
[1593]1617        poly = poly or ()
[513]1618        from os.path import expandvars
1619        filename = expandvars(filename)
[876]1620        s = scantable(self._math._gainel(self, poly, filename, method))
1621        s._add_history("gain_el", varlist)
[1593]1622        if insitu:
1623            self._assign(s)
1624        else:
1625            return s
[710]1626
[1862]1627    @asaplog_post_dec
[931]1628    def freq_align(self, reftime=None, method='cubic', insitu=None):
[1846]1629        """\
[513]1630        Return a scan where all rows have been aligned in frequency/velocity.
1631        The alignment frequency frame (e.g. LSRK) is that set by function
1632        set_freqframe.
[1846]1633
[513]1634        Parameters:
[1855]1635
[513]1636            reftime:     reference time to align at. By default, the time of
1637                         the first row of data is used.
[1855]1638
[513]1639            method:      Interpolation method for regridding the spectra.
1640                         Choose from "nearest", "linear", "cubic" (default)
1641                         and "spline"
[1855]1642
[513]1643            insitu:      if False a new scantable is returned.
1644                         Otherwise, the scaling is done in-situ
1645                         The default is taken from .asaprc (False)
[1846]1646
[513]1647        """
[931]1648        if insitu is None: insitu = rcParams["insitu"]
[876]1649        self._math._setinsitu(insitu)
[513]1650        varlist = vars()
[1593]1651        reftime = reftime or ""
[931]1652        s = scantable(self._math._freq_align(self, reftime, method))
[876]1653        s._add_history("freq_align", varlist)
1654        if insitu: self._assign(s)
1655        else: return s
[513]1656
[1862]1657    @asaplog_post_dec
[1725]1658    def opacity(self, tau=None, insitu=None):
[1846]1659        """\
[513]1660        Apply an opacity correction. The data
1661        and Tsys are multiplied by the correction factor.
[1846]1662
[513]1663        Parameters:
[1855]1664
[1689]1665            tau:         (list of) opacity from which the correction factor is
[513]1666                         exp(tau*ZD)
[1689]1667                         where ZD is the zenith-distance.
1668                         If a list is provided, it has to be of length nIF,
1669                         nIF*nPol or 1 and in order of IF/POL, e.g.
1670                         [opif0pol0, opif0pol1, opif1pol0 ...]
[1725]1671                         if tau is `None` the opacities are determined from a
1672                         model.
[1855]1673
[513]1674            insitu:      if False a new scantable is returned.
1675                         Otherwise, the scaling is done in-situ
1676                         The default is taken from .asaprc (False)
[1846]1677
[513]1678        """
1679        if insitu is None: insitu = rcParams['insitu']
[876]1680        self._math._setinsitu(insitu)
[513]1681        varlist = vars()
[1689]1682        if not hasattr(tau, "__len__"):
1683            tau = [tau]
[876]1684        s = scantable(self._math._opacity(self, tau))
1685        s._add_history("opacity", varlist)
1686        if insitu: self._assign(s)
1687        else: return s
[513]1688
[1862]1689    @asaplog_post_dec
[513]1690    def bin(self, width=5, insitu=None):
[1846]1691        """\
[513]1692        Return a scan where all spectra have been binned up.
[1846]1693
[1348]1694        Parameters:
[1846]1695
[513]1696            width:       The bin width (default=5) in pixels
[1855]1697
[513]1698            insitu:      if False a new scantable is returned.
1699                         Otherwise, the scaling is done in-situ
1700                         The default is taken from .asaprc (False)
[1846]1701
[513]1702        """
1703        if insitu is None: insitu = rcParams['insitu']
[876]1704        self._math._setinsitu(insitu)
[513]1705        varlist = vars()
[876]1706        s = scantable(self._math._bin(self, width))
[1118]1707        s._add_history("bin", varlist)
[1589]1708        if insitu:
1709            self._assign(s)
1710        else:
1711            return s
[513]1712
[1862]1713    @asaplog_post_dec
[513]1714    def resample(self, width=5, method='cubic', insitu=None):
[1846]1715        """\
[1348]1716        Return a scan where all spectra have been binned up.
[1573]1717
[1348]1718        Parameters:
[1846]1719
[513]1720            width:       The bin width (default=5) in pixels
[1855]1721
[513]1722            method:      Interpolation method when correcting from a table.
1723                         Values are  "nearest", "linear", "cubic" (default)
1724                         and "spline"
[1855]1725
[513]1726            insitu:      if False a new scantable is returned.
1727                         Otherwise, the scaling is done in-situ
1728                         The default is taken from .asaprc (False)
[1846]1729
[513]1730        """
1731        if insitu is None: insitu = rcParams['insitu']
[876]1732        self._math._setinsitu(insitu)
[513]1733        varlist = vars()
[876]1734        s = scantable(self._math._resample(self, method, width))
[1118]1735        s._add_history("resample", varlist)
[876]1736        if insitu: self._assign(s)
1737        else: return s
[513]1738
[1862]1739    @asaplog_post_dec
[946]1740    def average_pol(self, mask=None, weight='none'):
[1846]1741        """\
[946]1742        Average the Polarisations together.
[1846]1743
[946]1744        Parameters:
[1846]1745
[946]1746            mask:        An optional mask defining the region, where the
1747                         averaging will be applied. The output will have all
1748                         specified points masked.
[1855]1749
[946]1750            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1751                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1752
[946]1753        """
1754        varlist = vars()
[1593]1755        mask = mask or ()
[1010]1756        s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1757        s._add_history("average_pol", varlist)
[992]1758        return s
[513]1759
[1862]1760    @asaplog_post_dec
[1145]1761    def average_beam(self, mask=None, weight='none'):
[1846]1762        """\
[1145]1763        Average the Beams together.
[1846]1764
[1145]1765        Parameters:
1766            mask:        An optional mask defining the region, where the
1767                         averaging will be applied. The output will have all
1768                         specified points masked.
[1855]1769
[1145]1770            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1771                         weighted), or 'tsys' (1/Tsys**2 weighted)
[1846]1772
[1145]1773        """
1774        varlist = vars()
[1593]1775        mask = mask or ()
[1145]1776        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1777        s._add_history("average_beam", varlist)
1778        return s
1779
[1586]1780    def parallactify(self, pflag):
[1846]1781        """\
[1843]1782        Set a flag to indicate whether this data should be treated as having
[1617]1783        been 'parallactified' (total phase == 0.0)
[1846]1784
[1617]1785        Parameters:
[1855]1786
[1843]1787            pflag:  Bool indicating whether to turn this on (True) or
[1617]1788                    off (False)
[1846]1789
[1617]1790        """
[1586]1791        varlist = vars()
1792        self._parallactify(pflag)
1793        self._add_history("parallactify", varlist)
1794
[1862]1795    @asaplog_post_dec
[992]1796    def convert_pol(self, poltype=None):
[1846]1797        """\
[992]1798        Convert the data to a different polarisation type.
[1565]1799        Note that you will need cross-polarisation terms for most conversions.
[1846]1800
[992]1801        Parameters:
[1855]1802
[992]1803            poltype:    The new polarisation type. Valid types are:
[1565]1804                        "linear", "circular", "stokes" and "linpol"
[1846]1805
[992]1806        """
1807        varlist = vars()
[1859]1808        s = scantable(self._math._convertpol(self, poltype))
[1118]1809        s._add_history("convert_pol", varlist)
[992]1810        return s
1811
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the unsmoothed data so each row can be compared
        # (and restored if the user rejects the smoothing)
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval: show original vs smoothed row by row
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
[513]1889
[1862]1890    @asaplog_post_dec
[1907]1891    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
[1846]1892        """\
[513]1893        Return a scan which has been baselined (all rows) by a polynomial.
[1907]1894       
[513]1895        Parameters:
[1846]1896
[794]1897            mask:       an optional mask
[1855]1898
[794]1899            order:      the order of the polynomial (default is 0)
[1855]1900
[1061]1901            plot:       plot the fit and the residual. In this each
1902                        indivual fit has to be approved, by typing 'y'
1903                        or 'n'
[1855]1904
[1391]1905            uselin:     use linear polynomial fit
[1855]1906
[794]1907            insitu:     if False a new scantable is returned.
1908                        Otherwise, the scaling is done in-situ
1909                        The default is taken from .asaprc (False)
[1846]1910
[1907]1911            rows:       row numbers of spectra to be processed.
1912                        (default is None: for all rows)
1913       
1914        Example:
[513]1915            # return a scan baselined by a third order polynomial,
1916            # not using a mask
1917            bscan = scan.poly_baseline(order=3)
[1846]1918
[579]1919        """
[513]1920        if insitu is None: insitu = rcParams['insitu']
[1819]1921        if not insitu:
1922            workscan = self.copy()
1923        else:
1924            workscan = self
[513]1925        varlist = vars()
1926        if mask is None:
[1907]1927            mask = [True for i in xrange(self.nchan())]
[1819]1928
[1217]1929        try:
1930            f = fitter()
[1391]1931            if uselin:
1932                f.set_function(lpoly=order)
1933            else:
1934                f.set_function(poly=order)
[1819]1935
[1907]1936            if rows == None:
1937                rows = xrange(workscan.nrow())
1938            elif isinstance(rows, int):
1939                rows = [ rows ]
1940           
[1819]1941            if len(rows) > 0:
1942                self.blpars = []
[1907]1943                self.masklists = []
1944                self.actualmask = []
1945           
[1819]1946            for r in rows:
1947                f.x = workscan._getabcissa(r)
1948                f.y = workscan._getspectrum(r)
[1907]1949                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]1950                f.data = None
1951                f.fit()
1952                if plot:
1953                    f.plot(residual=True)
1954                    x = raw_input("Accept fit ( [y]/n ): ")
1955                    if x.upper() == 'N':
1956                        self.blpars.append(None)
[1907]1957                        self.masklists.append(None)
1958                        self.actualmask.append(None)
[1819]1959                        continue
1960                workscan._setspectrum(f.fitter.getresidual(), r)
1961                self.blpars.append(f.get_parameters())
[1931]1962                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
[1907]1963                self.actualmask.append(f.mask)
[1819]1964
1965            if plot:
1966                f._p.unmap()
1967                f._p = None
1968            workscan._add_history("poly_baseline", varlist)
[1856]1969            if insitu:
1970                self._assign(workscan)
1971            else:
1972                return workscan
[1217]1973        except RuntimeError:
1974            msg = "The fit failed, possibly because it didn't converge."
[1859]1975            raise RuntimeError(msg)
[513]1976
[1931]1977    @asaplog_post_dec
[1907]1978    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1979        """\
1980        Return a scan which has been baselined (all rows) by a polynomial.
1981        Parameters:
1982            mask:       an optional mask
1983            order:      the order of the polynomial (default is 0)
1984            plot:       plot the fit and the residual. In this each
1985                        indivual fit has to be approved, by typing 'y'
1986                        or 'n'. Ignored if batch = True.
1987            batch:      if True a faster algorithm is used and logs
1988                        including the fit results are not output
1989                        (default is False)
1990            insitu:     if False a new scantable is returned.
1991                        Otherwise, the scaling is done in-situ
1992                        The default is taken from .asaprc (False)
[1931]1993            rows:       row numbers of spectra to be baselined.
[1907]1994                        (default is None: for all rows)
1995        Example:
1996            # return a scan baselined by a third order polynomial,
1997            # not using a mask
1998            bscan = scan.poly_baseline(order=3)
1999        """
[1931]2000       
2001        varlist = vars()
2002       
[1907]2003        if insitu is None: insitu = rcParams["insitu"]
2004        if insitu:
2005            workscan = self
2006        else:
2007            workscan = self.copy()
2008
2009        nchan = workscan.nchan()
2010       
2011        if mask is None:
2012            mask = [True for i in xrange(nchan)]
2013
2014        try:
2015            if rows == None:
2016                rows = xrange(workscan.nrow())
2017            elif isinstance(rows, int):
2018                rows = [ rows ]
2019           
2020            if len(rows) > 0:
[1931]2021                workscan.blpars = []
2022                workscan.masklists = []
2023                workscan.actualmask = []
[1907]2024
2025            if batch:
[1931]2026                workscan._poly_baseline_batch(mask, order)
[1907]2027            elif plot:
2028                f = fitter()
2029                f.set_function(lpoly=order)
2030                for r in rows:
2031                    f.x = workscan._getabcissa(r)
2032                    f.y = workscan._getspectrum(r)
2033                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2034                    f.data = None
2035                    f.fit()
2036                   
2037                    f.plot(residual=True)
2038                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2039                    if accept_fit.upper() == "N":
2040                        self.blpars.append(None)
2041                        self.masklists.append(None)
2042                        self.actualmask.append(None)
2043                        continue
2044                    workscan._setspectrum(f.fitter.getresidual(), r)
[1931]2045                    workscan.blpars.append(f.get_parameters())
2046                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2047                    workscan.actualmask.append(f.mask)
[1907]2048                   
2049                f._p.unmap()
2050                f._p = None
2051            else:
2052                for r in rows:
[1931]2053                    fitparams = workscan._poly_baseline(mask, order, r)
2054                    params = fitparams.getparameters()
2055                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2056                    errors = fitparams.geterrors()
2057                    fmask = mask_and(mask, workscan._getmask(r))
2058
2059                    workscan.blpars.append({"params":params,
2060                                            "fixed": fitparams.getfixedparameters(),
2061                                            "formatted":fmtd, "errors":errors})
2062                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2063                    workscan.actualmask.append(fmask)
[1907]2064                   
[1931]2065                    asaplog.push(fmtd)
[1907]2066           
2067            workscan._add_history("poly_baseline", varlist)
2068           
2069            if insitu:
2070                self._assign(workscan)
2071            else:
2072                return workscan
2073           
[1919]2074        except RuntimeError, e:
[1907]2075            msg = "The fit failed, possibly because it didn't converge."
2076            if rcParams["verbose"]:
[1919]2077                asaplog.push(str(e))
[1907]2078                asaplog.push(str(msg))
2079                return
2080            else:
[1919]2081                raise RuntimeError(str(e)+'\n'+msg)
[1907]2082
2083
2084    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
[1280]2085                           threshold=3, chan_avg_limit=1, plot=False,
[1907]2086                           insitu=None, rows=None):
[1846]2087        """\
[1931]2088        Return a scan which has been baselined (all rows) by a polynomial.
[880]2089        Spectral lines are detected first using linefinder and masked out
2090        to avoid them affecting the baseline solution.
2091
2092        Parameters:
[1846]2093
[880]2094            mask:       an optional mask retreived from scantable
[1846]2095
2096            edge:       an optional number of channel to drop at the edge of
2097                        spectrum. If only one value is
[880]2098                        specified, the same number will be dropped from
2099                        both sides of the spectrum. Default is to keep
[907]2100                        all channels. Nested tuples represent individual
[976]2101                        edge selection for different IFs (a number of spectral
2102                        channels can be different)
[1846]2103
[880]2104            order:      the order of the polynomial (default is 0)
[1846]2105
[880]2106            threshold:  the threshold used by line finder. It is better to
2107                        keep it large as only strong lines affect the
2108                        baseline solution.
[1846]2109
[1280]2110            chan_avg_limit:
2111                        a maximum number of consequtive spectral channels to
2112                        average during the search of weak and broad lines.
2113                        The default is no averaging (and no search for weak
2114                        lines). If such lines can affect the fitted baseline
2115                        (e.g. a high order polynomial is fitted), increase this
2116                        parameter (usually values up to 8 are reasonable). Most
2117                        users of this method should find the default value
2118                        sufficient.
[1846]2119
[1061]2120            plot:       plot the fit and the residual. In this each
2121                        indivual fit has to be approved, by typing 'y'
2122                        or 'n'
[1846]2123
[880]2124            insitu:     if False a new scantable is returned.
2125                        Otherwise, the scaling is done in-situ
2126                        The default is taken from .asaprc (False)
[1907]2127            rows:       row numbers of spectra to be processed.
2128                        (default is None: for all rows)
[880]2129
[1846]2130
2131        Example::
2132
2133            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2134
[880]2135        """
2136        if insitu is None: insitu = rcParams['insitu']
2137        varlist = vars()
2138        from asap.asaplinefind import linefinder
2139        from asap import _is_sequence_or_number as _is_valid
2140
[976]2141        # check whether edge is set up for each IF individually
[1118]2142        individualedge = False;
2143        if len(edge) > 1:
2144            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2145                individualedge = True;
[907]2146
[1118]2147        if not _is_valid(edge, int) and not individualedge:
[909]2148            raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]2149            pair of integers specified as a tuple. Nested tuples are allowed \
2150            to make individual selection for different IFs."
[919]2151
[1118]2152        curedge = (0, 0)
2153        if individualedge:
2154            for edgepar in edge:
2155                if not _is_valid(edgepar, int):
2156                    raise ValueError, "Each element of the 'edge' tuple has \
2157                                       to be a pair of integers or an integer."
[907]2158        else:
[1118]2159            curedge = edge;
[880]2160
[1907]2161        if not insitu:
2162            workscan = self.copy()
2163        else:
2164            workscan = self
2165
[880]2166        # setup fitter
2167        f = fitter()
[1907]2168        f.set_function(lpoly=order)
[880]2169
2170        # setup line finder
[1118]2171        fl = linefinder()
[1268]2172        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]2173
[907]2174        fl.set_scan(workscan)
2175
[1907]2176        if mask is None:
2177            mask = _n_bools(workscan.nchan(), True)
2178       
2179        if rows is None:
2180            rows = xrange(workscan.nrow())
2181        elif isinstance(rows, int):
2182            rows = [ rows ]
2183       
[1819]2184        # Save parameters of baseline fits & masklists as a class attribute.
2185        # NOTICE: It does not reflect changes in scantable!
2186        if len(rows) > 0:
2187            self.blpars=[]
2188            self.masklists=[]
[1907]2189            self.actualmask=[]
[880]2190        asaplog.push("Processing:")
2191        for r in rows:
[1118]2192            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2193                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2194                 workscan.getpol(r), workscan.getcycle(r))
[880]2195            asaplog.push(msg, False)
[907]2196
[976]2197            # figure out edge parameter
[1118]2198            if individualedge:
2199                if len(edge) >= workscan.getif(r):
2200                    raise RuntimeError, "Number of edge elements appear to " \
2201                                        "be less than the number of IFs"
2202                    curedge = edge[workscan.getif(r)]
[919]2203
[1907]2204            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
[1819]2205
[976]2206            # setup line finder
[1819]2207            fl.find_lines(r, actualmask, curedge)
[1907]2208           
[1819]2209            f.x = workscan._getabcissa(r)
2210            f.y = workscan._getspectrum(r)
[1907]2211            f.mask = fl.get_mask()
[1819]2212            f.data = None
[880]2213            f.fit()
[1819]2214
2215            # Show mask list
[1931]2216            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
[1819]2217            msg = "mask range: "+str(masklist)
2218            asaplog.push(msg, False)
2219
[1061]2220            if plot:
2221                f.plot(residual=True)
2222                x = raw_input("Accept fit ( [y]/n ): ")
2223                if x.upper() == 'N':
[1819]2224                    self.blpars.append(None)
2225                    self.masklists.append(None)
[1907]2226                    self.actualmask.append(None)
[1061]2227                    continue
[1819]2228
[880]2229            workscan._setspectrum(f.fitter.getresidual(), r)
[1819]2230            self.blpars.append(f.get_parameters())
2231            self.masklists.append(masklist)
[1907]2232            self.actualmask.append(f.mask)
[1061]2233        if plot:
2234            f._p.unmap()
2235            f._p = None
2236        workscan._add_history("auto_poly_baseline", varlist)
[880]2237        if insitu:
2238            self._assign(workscan)
2239        else:
2240            return workscan
2241
[1862]2242    @asaplog_post_dec
[914]2243    def rotate_linpolphase(self, angle):
[1846]2244        """\
[914]2245        Rotate the phase of the complex polarization O=Q+iU correlation.
2246        This is always done in situ in the raw data.  So if you call this
2247        function more than once then each call rotates the phase further.
[1846]2248
[914]2249        Parameters:
[1846]2250
[914]2251            angle:   The angle (degrees) to rotate (add) by.
[1846]2252
2253        Example::
2254
[914]2255            scan.rotate_linpolphase(2.3)
[1846]2256
[914]2257        """
2258        varlist = vars()
[936]2259        self._math._rotate_linpolphase(self, angle)
[914]2260        self._add_history("rotate_linpolphase", varlist)
2261        return
[710]2262
[1862]2263    @asaplog_post_dec
[914]2264    def rotate_xyphase(self, angle):
[1846]2265        """\
[914]2266        Rotate the phase of the XY correlation.  This is always done in situ
2267        in the data.  So if you call this function more than once
2268        then each call rotates the phase further.
[1846]2269
[914]2270        Parameters:
[1846]2271
[914]2272            angle:   The angle (degrees) to rotate (add) by.
[1846]2273
2274        Example::
2275
[914]2276            scan.rotate_xyphase(2.3)
[1846]2277
[914]2278        """
2279        varlist = vars()
[936]2280        self._math._rotate_xyphase(self, angle)
[914]2281        self._add_history("rotate_xyphase", varlist)
2282        return
2283
[1862]2284    @asaplog_post_dec
[914]2285    def swap_linears(self):
[1846]2286        """\
[1573]2287        Swap the linear polarisations XX and YY, or better the first two
[1348]2288        polarisations as this also works for ciculars.
[914]2289        """
2290        varlist = vars()
[936]2291        self._math._swap_linears(self)
[914]2292        self._add_history("swap_linears", varlist)
2293        return
2294
[1862]2295    @asaplog_post_dec
[914]2296    def invert_phase(self):
[1846]2297        """\
[914]2298        Invert the phase of the complex polarisation
2299        """
2300        varlist = vars()
[936]2301        self._math._invert_phase(self)
[914]2302        self._add_history("invert_phase", varlist)
2303        return
2304
[1862]2305    @asaplog_post_dec
[876]2306    def add(self, offset, insitu=None):
[1846]2307        """\
[513]2308        Return a scan where all spectra have the offset added
[1846]2309
[513]2310        Parameters:
[1846]2311
[513]2312            offset:      the offset
[1855]2313
[513]2314            insitu:      if False a new scantable is returned.
2315                         Otherwise, the scaling is done in-situ
2316                         The default is taken from .asaprc (False)
[1846]2317
[513]2318        """
2319        if insitu is None: insitu = rcParams['insitu']
[876]2320        self._math._setinsitu(insitu)
[513]2321        varlist = vars()
[876]2322        s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]2323        s._add_history("add", varlist)
[876]2324        if insitu:
2325            self._assign(s)
2326        else:
[513]2327            return s
2328
[1862]2329    @asaplog_post_dec
[1308]2330    def scale(self, factor, tsys=True, insitu=None):
[1846]2331        """\
2332
[1938]2333        Return a scan where all spectra are scaled by the given 'factor'
[1846]2334
[513]2335        Parameters:
[1846]2336
[1819]2337            factor:      the scaling factor (float or 1D float list)
[1855]2338
[513]2339            insitu:      if False a new scantable is returned.
2340                         Otherwise, the scaling is done in-situ
2341                         The default is taken from .asaprc (False)
[1855]2342
[513]2343            tsys:        if True (default) then apply the operation to Tsys
2344                         as well as the data
[1846]2345
[513]2346        """
2347        if insitu is None: insitu = rcParams['insitu']
[876]2348        self._math._setinsitu(insitu)
[513]2349        varlist = vars()
[1819]2350        s = None
2351        import numpy
2352        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2353            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2354                from asapmath import _array2dOp
2355                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2356            else:
2357                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2358        else:
2359            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
[1118]2360        s._add_history("scale", varlist)
[876]2361        if insitu:
2362            self._assign(s)
2363        else:
[513]2364            return s
2365
[1504]2366    def set_sourcetype(self, match, matchtype="pattern",
2367                       sourcetype="reference"):
[1846]2368        """\
[1502]2369        Set the type of the source to be an source or reference scan
[1846]2370        using the provided pattern.
2371
[1502]2372        Parameters:
[1846]2373
[1504]2374            match:          a Unix style pattern, regular expression or selector
[1855]2375
[1504]2376            matchtype:      'pattern' (default) UNIX style pattern or
2377                            'regex' regular expression
[1855]2378
[1502]2379            sourcetype:     the type of the source to use (source/reference)
[1846]2380
[1502]2381        """
2382        varlist = vars()
2383        basesel = self.get_selection()
2384        stype = -1
2385        if sourcetype.lower().startswith("r"):
2386            stype = 1
2387        elif sourcetype.lower().startswith("s"):
2388            stype = 0
[1504]2389        else:
[1502]2390            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]2391        if matchtype.lower().startswith("p"):
2392            matchtype = "pattern"
2393        elif matchtype.lower().startswith("r"):
2394            matchtype = "regex"
2395        else:
2396            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]2397        sel = selector()
2398        if isinstance(match, selector):
2399            sel = match
2400        else:
[1504]2401            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]2402        self.set_selection(basesel+sel)
2403        self._setsourcetype(stype)
2404        self.set_selection(basesel)
[1573]2405        self._add_history("set_sourcetype", varlist)
[1502]2406
    @asaplog_post_dec
    @preserve_selection
    def auto_quotient(self, preserve=True, mode='paired', verify=False):
        """\
        This function allows to build quotients automatically.
        It assumes the observation to have the same number of
        "ons" and "offs"

        Parameters:

            preserve:       you can preserve (default) the continuum or
                            remove it.  The equations used are

                            preserve: Output = Toff * (on/off) - Toff

                            remove:   Output = Toff * (on/off) - Ton

            mode:           the on/off detection mode
                            'paired' (default)
                            identifies 'off' scans by the
                            trailing '_R' (Mopra/Parkes) or
                            '_e'/'_w' (Tid) and matches
                            on/off pairs from the observing pattern
                            'time'
                            finds the closest off in time

        .. todo:: verify argument is not implemented

        """
        varlist = vars()
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        s = None
        if mode.lower() == "paired":
            # narrow the current selection to 'off' scans, copy them,
            # then to 'on' scans; @preserve_selection restores the
            # caller's selection afterwards
            sel = self.get_selection()
            sel.set_query("SRCTYPE==psoff")
            self.set_selection(sel)
            offs = self.copy()
            sel.set_query("SRCTYPE==pson")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
        elif mode.lower() == "time":
            # on/off pairing resolved in C++ by closest-in-time matching
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]2455
[1862]2456    @asaplog_post_dec
[1145]2457    def mx_quotient(self, mask = None, weight='median', preserve=True):
[1846]2458        """\
[1143]2459        Form a quotient using "off" beams when observing in "MX" mode.
[1846]2460
[1143]2461        Parameters:
[1846]2462
[1145]2463            mask:           an optional mask to be used when weight == 'stddev'
[1855]2464
[1143]2465            weight:         How to average the off beams.  Default is 'median'.
[1855]2466
[1145]2467            preserve:       you can preserve (default) the continuum or
[1855]2468                            remove it.  The equations used are:
[1846]2469
[1855]2470                                preserve: Output = Toff * (on/off) - Toff
2471
2472                                remove:   Output = Toff * (on/off) - Ton
2473
[1217]2474        """
[1593]2475        mask = mask or ()
[1141]2476        varlist = vars()
2477        on = scantable(self._math._mx_extract(self, 'on'))
[1143]2478        preoff = scantable(self._math._mx_extract(self, 'off'))
2479        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]2480        from asapmath  import quotient
[1145]2481        q = quotient(on, off, preserve)
[1143]2482        q._add_history("mx_quotient", varlist)
[1217]2483        return q
[513]2484
[1862]2485    @asaplog_post_dec
[718]2486    def freq_switch(self, insitu=None):
[1846]2487        """\
[718]2488        Apply frequency switching to the data.
[1846]2489
[718]2490        Parameters:
[1846]2491
[718]2492            insitu:      if False a new scantable is returned.
2493                         Otherwise, the swictching is done in-situ
2494                         The default is taken from .asaprc (False)
[1846]2495
[718]2496        """
2497        if insitu is None: insitu = rcParams['insitu']
[876]2498        self._math._setinsitu(insitu)
[718]2499        varlist = vars()
[876]2500        s = scantable(self._math._freqswitch(self))
[1118]2501        s._add_history("freq_switch", varlist)
[1856]2502        if insitu:
2503            self._assign(s)
2504        else:
2505            return s
[718]2506
[1862]2507    @asaplog_post_dec
[780]2508    def recalc_azel(self):
[1846]2509        """Recalculate the azimuth and elevation for each position."""
[780]2510        varlist = vars()
[876]2511        self._recalcazel()
[780]2512        self._add_history("recalc_azel", varlist)
2513        return
2514
[1862]2515    @asaplog_post_dec
[513]2516    def __add__(self, other):
2517        varlist = vars()
2518        s = None
2519        if isinstance(other, scantable):
[1573]2520            s = scantable(self._math._binaryop(self, other, "ADD"))
[513]2521        elif isinstance(other, float):
[876]2522            s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]2523        else:
[718]2524            raise TypeError("Other input is not a scantable or float value")
[513]2525        s._add_history("operator +", varlist)
2526        return s
2527
[1862]2528    @asaplog_post_dec
[513]2529    def __sub__(self, other):
2530        """
2531        implicit on all axes and on Tsys
2532        """
2533        varlist = vars()
2534        s = None
2535        if isinstance(other, scantable):
[1588]2536            s = scantable(self._math._binaryop(self, other, "SUB"))
[513]2537        elif isinstance(other, float):
[876]2538            s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]2539        else:
[718]2540            raise TypeError("Other input is not a scantable or float value")
[513]2541        s._add_history("operator -", varlist)
2542        return s
[710]2543
[1862]2544    @asaplog_post_dec
[513]2545    def __mul__(self, other):
2546        """
2547        implicit on all axes and on Tsys
2548        """
2549        varlist = vars()
2550        s = None
2551        if isinstance(other, scantable):
[1588]2552            s = scantable(self._math._binaryop(self, other, "MUL"))
[513]2553        elif isinstance(other, float):
[876]2554            s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]2555        else:
[718]2556            raise TypeError("Other input is not a scantable or float value")
[513]2557        s._add_history("operator *", varlist)
2558        return s
2559
[710]2560
[1862]2561    @asaplog_post_dec
[513]2562    def __div__(self, other):
2563        """
2564        implicit on all axes and on Tsys
2565        """
2566        varlist = vars()
2567        s = None
2568        if isinstance(other, scantable):
[1589]2569            s = scantable(self._math._binaryop(self, other, "DIV"))
[513]2570        elif isinstance(other, float):
2571            if other == 0.0:
[718]2572                raise ZeroDivisionError("Dividing by zero is not recommended")
[876]2573            s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]2574        else:
[718]2575            raise TypeError("Other input is not a scantable or float value")
[513]2576        s._add_history("operator /", varlist)
2577        return s
2578
[1862]2579    @asaplog_post_dec
[530]2580    def get_fit(self, row=0):
[1846]2581        """\
[530]2582        Print or return the stored fits for a row in the scantable
[1846]2583
[530]2584        Parameters:
[1846]2585
[530]2586            row:    the row which the fit has been applied to.
[1846]2587
[530]2588        """
2589        if row > self.nrow():
2590            return
[976]2591        from asap.asapfit import asapfit
[530]2592        fit = asapfit(self._getfit(row))
[1859]2593        asaplog.push( '%s' %(fit) )
2594        return fit.as_dict()
[530]2595
[1483]2596    def flag_nans(self):
[1846]2597        """\
[1483]2598        Utility function to flag NaN values in the scantable.
2599        """
2600        import numpy
2601        basesel = self.get_selection()
2602        for i in range(self.nrow()):
[1589]2603            sel = self.get_row_selector(i)
2604            self.set_selection(basesel+sel)
[1483]2605            nans = numpy.isnan(self._getspectrum(0))
2606        if numpy.any(nans):
2607            bnans = [ bool(v) for v in nans]
2608            self.flag(bnans)
2609        self.set_selection(basesel)
2610
[1588]2611    def get_row_selector(self, rowno):
[1992]2612        #return selector(beams=self.getbeam(rowno),
2613        #                ifs=self.getif(rowno),
2614        #                pols=self.getpol(rowno),
2615        #                scans=self.getscan(rowno),
2616        #                cycles=self.getcycle(rowno))
2617        return selector(rows=[rowno])
[1573]2618
    def _add_history(self, funcname, parameters):
        """Append a '##'-separated record of a method call to the table history.

        Parameters:

            funcname:    name of the scantable method being recorded

            parameters:  dict of the call's local variables (usually vars());
                         NOTE: the 'self' entry is removed from the caller's
                         dict as a side effect

        """
        # history recording can be disabled globally via .asaprc
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # the scantable instance itself carries no useful history information
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # one level of nesting: record each item of a dict parameter
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # compress boolean channel masks into [start, end) pairs
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
2658
[710]2659
[484]2660    def _zip_mask(self, mask):
2661        mask = list(mask)
2662        i = 0
2663        segments = []
2664        while mask[i:].count(1):
2665            i += mask[i:].index(1)
2666            if mask[i:].count(0):
2667                j = i + mask[i:].index(0)
2668            else:
[710]2669                j = len(mask)
[1118]2670            segments.append([i, j])
[710]2671            i = j
[484]2672        return segments
[714]2673
[626]2674    def _get_ordinate_label(self):
2675        fu = "("+self.get_fluxunit()+")"
2676        import re
2677        lbl = "Intensity"
[1118]2678        if re.match(".K.", fu):
[626]2679            lbl = "Brightness Temperature "+ fu
[1118]2680        elif re.match(".Jy.", fu):
[626]2681            lbl = "Flux density "+ fu
2682        return lbl
[710]2683
[876]2684    def _check_ifs(self):
[1986]2685        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
2686        #nchans = filter(lambda t: t > 0, nchans)
2687        nchans = [self.nchan(i) for i in self.getifnos()]
[876]2688        return (sum(nchans)/len(nchans) == nchans[0])
[976]2689
[1862]2690    @asaplog_post_dec
[1916]2691    #def _fill(self, names, unit, average, getpt, antenna):
2692    def _fill(self, names, unit, average, opts={}):
[976]2693        first = True
2694        fullnames = []
2695        for name in names:
2696            name = os.path.expandvars(name)
2697            name = os.path.expanduser(name)
2698            if not os.path.exists(name):
2699                msg = "File '%s' does not exists" % (name)
2700                raise IOError(msg)
2701            fullnames.append(name)
2702        if average:
2703            asaplog.push('Auto averaging integrations')
[1079]2704        stype = int(rcParams['scantable.storage'].lower() == 'disk')
[976]2705        for name in fullnames:
[1073]2706            tbl = Scantable(stype)
[1843]2707            r = filler(tbl)
[1504]2708            rx = rcParams['scantable.reference']
[1843]2709            r.setreferenceexpr(rx)
[976]2710            msg = "Importing %s..." % (name)
[1118]2711            asaplog.push(msg, False)
[1916]2712            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
[1904]2713            r.open(name, opts)# antenna, -1, -1, getpt)
[1843]2714            r.fill()
[976]2715            if average:
[1118]2716                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
[976]2717            if not first:
2718                tbl = self._math._merge([self, tbl])
2719            Scantable.__init__(self, tbl)
[1843]2720            r.close()
[1118]2721            del r, tbl
[976]2722            first = False
[1861]2723            #flush log
2724        asaplog.post()
[976]2725        if unit is not None:
2726            self.set_fluxunit(unit)
[1824]2727        if not is_casapy():
2728            self.set_freqframe(rcParams['scantable.freqframe'])
[976]2729
[1402]2730    def __getitem__(self, key):
2731        if key < 0:
2732            key += self.nrow()
2733        if key >= self.nrow():
2734            raise IndexError("Row index out of range.")
2735        return self._getspectrum(key)
2736
2737    def __setitem__(self, key, value):
2738        if key < 0:
2739            key += self.nrow()
2740        if key >= self.nrow():
2741            raise IndexError("Row index out of range.")
2742        if not hasattr(value, "__len__") or \
2743                len(value) > self.nchan(self.getif(key)):
2744            raise ValueError("Spectrum length doesn't match.")
2745        return self._setspectrum(value, key)
2746
2747    def __len__(self):
2748        return self.nrow()
2749
2750    def __iter__(self):
2751        for i in range(len(self)):
2752            yield self[i]
Note: See TracBrowser for help on using the repository browser.