source: trunk/python/scantable.py @ 1908

Last change on this file since 1908 was 1908, checked in by WataruKawasaki, 14 years ago

New Development: No

JIRA Issue: Yes CAS-1937, CAS-2373

Ready to Release: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sdbaseline

Description: Changing some function names


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
def preserve_selection(func):
    """Decorator that snapshots the scantable's selection before calling
    *func* and restores it afterwards, even when *func* raises.
    """
    @wraps_dec(func)
    def wrapper(scan, *args, **kwargs):
        saved = scan.get_selection()
        try:
            result = func(scan, *args, **kwargs)
        finally:
            # always restore, success or failure
            scan.set_selection(saved)
        return result
    return wrapper
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    Returns:

        True if the directory looks like a casacore table that is not
        a MeasurementSet, False otherwise.

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # 'with' guarantees the file is closed even if readline() raises
        # (the old open()/close() pair leaked the handle on error)
        with open(filename+'/table.info') as f:
            l = f.readline()
        # Any casacore table that is not a Measurement Set is accepted
        # as a scantable; the type string is not always 'Scantable'.
        return l.find('Measurement Set') == -1
    return False
56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    Returns:

        True if the directory is a casacore table whose type line
        identifies it as a Measurement Set, False otherwise.

    """
    if ( os.path.isdir(filename)
         and os.path.exists(filename+'/table.info')
         and os.path.exists(filename+'/table.dat') ):
        # 'with' guarantees the file is closed even if readline() raises
        # (the old open()/close() pair leaked the handle on error)
        with open(filename+'/table.info') as f:
            l = f.readline()
        return l.find('Measurement Set') != -1
    return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True (the default), all pointing data are
                          filled. Setting it to False makes loading the
                          MS data faster in some cases.

            antenna:      Antenna selection. integer (id) or string
                          (name or id). NOTE: currently unsupported; a
                          warning is logged and '' is used instead.

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        # pointing data are filled by default (see docstring above)
        if getpt is None:
            getpt = True
        if antenna is not None:
            asaplog.push("Antenna selection currently unsupported."
                         "Using ''")
            asaplog.post('WARN')
        # normalise 'antenna' to a comma-separated string for the filler
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s' % antenna
        elif type(antenna) == list:
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    raise TypeError('Bad antenna selection.')
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        # capture the (normalised) arguments for the history record
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # reference an existing scantable
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    self._fill([filename], unit, average, getpt, antenna)
                elif os.path.isfile(filename):
                    self._fill([filename], unit, average, getpt, antenna)
                else:
                    # NOTE(review): message is missing a space
                    # ("'%s'is"); kept byte-identical here.
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list/tuple of file names is passed to the filler as-is
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
174
175    @asaplog_post_dec
176    def save(self, name=None, format=None, overwrite=False):
177        """\
178        Store the scantable on disk. This can be an asap (aips++) Table,
179        SDFITS or MS2 format.
180
181        Parameters:
182
183            name:        the name of the outputfile. For format "ASCII"
184                         this is the root file name (data in 'name'.txt
185                         and header in 'name'_header.txt)
186
187            format:      an optional file format. Default is ASAP.
188                         Allowed are:
189
190                            * 'ASAP' (save as ASAP [aips++] Table),
191                            * 'SDFITS' (save as SDFITS file)
192                            * 'ASCII' (saves as ascii text file)
193                            * 'MS2' (saves as an casacore MeasurementSet V2)
194                            * 'FITS' (save as image FITS - not readable by class)
195                            * 'CLASS' (save as FITS readable by CLASS)
196
197            overwrite:   If the file should be overwritten if it exists.
198                         The default False is to return with warning
199                         without writing the output. USE WITH CARE.
200
201        Example::
202
203            scan.save('myscan.asap')
204            scan.save('myscan.sdfits', 'SDFITS')
205
206        """
207        from os import path
208        format = format or rcParams['scantable.save']
209        suffix = '.'+format.lower()
210        if name is None or name == "":
211            name = 'scantable'+suffix
212            msg = "No filename given. Using default name %s..." % name
213            asaplog.push(msg)
214        name = path.expandvars(name)
215        if path.isfile(name) or path.isdir(name):
216            if not overwrite:
217                msg = "File %s exists." % name
218                raise IOError(msg)
219        format2 = format.upper()
220        if format2 == 'ASAP':
221            self._save(name)
222        else:
223            from asap._asap import stwriter as stw
224            writer = stw(format2)
225            writer.write(self, name)
226        return
227
228    def copy(self):
229        """Return a copy of this scantable.
230
231        *Note*:
232
233            This makes a full (deep) copy. scan2 = scan1 makes a reference.
234
235        Example::
236
237            copiedscan = scan.copy()
238
239        """
240        sd = scantable(Scantable._copy(self))
241        return sd
242
243    def drop_scan(self, scanid=None):
244        """\
245        Return a new scantable where the specified scan number(s) has(have)
246        been dropped.
247
248        Parameters:
249
250            scanid:    a (list of) scan number(s)
251
252        """
253        from asap import _is_sequence_or_number as _is_valid
254        from asap import _to_list
255        from asap import unique
256        if not _is_valid(scanid):
257            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
258        scanid = _to_list(scanid)
259        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
260        for sid in scanid: allscans.remove(sid)
261        if len(allscans) == 0:
262            raise ValueError("Can't remove all scans")
263        sel = selector(scans=allscans)
264        return self._select_copy(sel)
265
266    def _select_copy(self, selection):
267        orig = self.get_selection()
268        self.set_selection(orig+selection)
269        cp = self.copy()
270        self.set_selection(orig)
271        return cp
272
273    def get_scan(self, scanid=None):
274        """\
275        Return a specific scan (by scanno) or collection of scans (by
276        source name) in a new scantable.
277
278        *Note*:
279
280            See scantable.drop_scan() for the inverse operation.
281
282        Parameters:
283
284            scanid:    a (list of) scanno or a source name, unix-style
285                       patterns are accepted for source name matching, e.g.
286                       '*_R' gets all 'ref scans
287
288        Example::
289
290            # get all scans containing the source '323p459'
291            newscan = scan.get_scan('323p459')
292            # get all 'off' scans
293            refscans = scan.get_scan('*_R')
294            # get a susbset of scans by scanno (as listed in scan.summary())
295            newscan = scan.get_scan([0, 2, 7, 10])
296
297        """
298        if scanid is None:
299            raise RuntimeError( 'Please specify a scan no or name to '
300                                'retrieve from the scantable' )
301        try:
302            bsel = self.get_selection()
303            sel = selector()
304            if type(scanid) is str:
305                sel.set_name(scanid)
306                return self._select_copy(sel)
307            elif type(scanid) is int:
308                sel.set_scans([scanid])
309                return self._select_copy(sel)
310            elif type(scanid) is list:
311                sel.set_scans(scanid)
312                return self._select_copy(sel)
313            else:
314                msg = "Illegal scanid type, use 'int' or 'list' if ints."
315                raise TypeError(msg)
316        except RuntimeError:
317            raise
318
    def __str__(self):
        # Same text as summary() builds (it calls _summary(self, True)
        # too), just without the pager/file handling.
        return Scantable._summary(self, True)
321
322    def summary(self, filename=None):
323        """\
324        Print a summary of the contents of this scantable.
325
326        Parameters:
327
328            filename:    the name of a file to write the putput to
329                         Default - no file output
330
331        """
332        info = Scantable._summary(self, True)
333        if filename is not None:
334            if filename is "":
335                filename = 'scantable_summary.txt'
336            from os.path import expandvars, isdir
337            filename = expandvars(filename)
338            if not isdir(filename):
339                data = open(filename, 'w')
340                data.write(info)
341                data.close()
342            else:
343                msg = "Illegal file name '%s'." % (filename)
344                raise IOError(msg)
345        return page(info)
346
347    def get_spectrum(self, rowno):
348        """Return the spectrum for the current row in the scantable as a list.
349
350        Parameters:
351
352             rowno:   the row number to retrieve the spectrum from
353
354        """
355        return self._getspectrum(rowno)
356
357    def get_mask(self, rowno):
358        """Return the mask for the current row in the scantable as a list.
359
360        Parameters:
361
362             rowno:   the row number to retrieve the mask from
363
364        """
365        return self._getmask(rowno)
366
    def set_spectrum(self, spec, rowno):
        """Set the spectrum for row *rowno* in the scantable.

        Parameters:

             spec:   the new spectrum; must have exactly nchan() values

             rowno:  the row number to set the spectrum for

        """
        # NOTE(review): assert is stripped when python runs with -O;
        # an explicit ValueError would be more robust input validation.
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)
379
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.

        *Note*:

            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate does contain the original frequency set-up
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.::

                  c = myscan.get_coordinate(0)
                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:

             rowno:    the row number for the spectral coordinate

        """
        # wrap the raw C++ coordinate in the python convenience class
        return coordinate(Scantable.get_coordinate(self, rowno))
402
403    def get_selection(self):
404        """\
405        Get the selection object currently set on this scantable.
406
407        Example::
408
409            sel = scan.get_selection()
410            sel.set_ifs(0)              # select IF 0
411            scan.set_selection(sel)     # apply modified selection
412
413        """
414        return selector(self._getselection())
415
    def set_selection(self, selection=None, **kw):
        """\
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.

        Parameters:

            selection:    a selector object (default unset the selection), or
                          any combination of "pols", "ifs", "beams", "scans",
                          "cycles", "name", "query"

        Examples::

            sel = selector()         # create a selection object
            sel.set_scans([0, 3])    # select SCANNO 0 and 3
            scan.set_selection(sel)  # set the selection
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection
            # or the equivalent
            scan.set_selection(scans=[0,3])
            scan.summary()           # will only print summary of scanno 0 an 3
            scan.set_selection()     # unset the selection

        """
        if selection is None:
            # reset
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
                selection = selector(**kw)
        self._setselection(selection)
451
452    def get_row(self, row=0, insitu=None):
453        """\
454        Select a row in the scantable.
455        Return a scantable with single row.
456
457        Parameters:
458
459            row:    row no of integration, default is 0.
460            insitu: if False a new scantable is returned. Otherwise, the
461                    scaling is done in-situ. The default is taken from .asaprc
462                    (False)
463
464        """
465        if insitu is None: insitu = rcParams['insitu']
466        if not insitu:
467            workscan = self.copy()
468        else:
469            workscan = self
470        # Select a row
471        sel=selector()
472        sel.set_scans([workscan.getscan(row)])
473        sel.set_cycles([workscan.getcycle(row)])
474        sel.set_beams([workscan.getbeam(row)])
475        sel.set_ifs([workscan.getif(row)])
476        sel.set_polarisations([workscan.getpol(row)])
477        sel.set_name(workscan._getsourcename(row))
478        workscan.set_selection(sel)
479        if not workscan.nrow() == 1:
480            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
481            raise RuntimeError(msg)
482        del sel
483        if insitu:
484            self._assign(workscan)
485        else:
486            return workscan
487
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        # empty mask means: use all channels
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' stats return the abcissa value at the extremum
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max variants also need the channel of the extremum
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc:
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # NOTE(review): 'rows' stays unbound (NameError below) when
        # 'row' is neither None nor an int — TODO confirm intended.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # abcissa-valued statistic: collect x, show y as reference
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
            out +=  sep+"\n"

        # Route the report through a temp file so it reaches the log in
        # one piece. NOTE(review): 'statunit' is only assigned inside
        # the loop; an empty selection would raise NameError here.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
595
596    def chan2data(self, rowno=0, chan=0):
597        """\
598        Returns channel/frequency/velocity and spectral value
599        at an arbitrary row and channel in the scantable.
600
601        Parameters:
602
603            rowno:   a row number in the scantable. Default is the
604                     first row, i.e. rowno=0
605
606            chan:    a channel in the scantable. Default is the first
607                     channel, i.e. pos=0
608
609        """
610        if isinstance(rowno, int) and isinstance(chan, int):
611            qx = {'unit': self.get_unit(),
612                  'value': self._getabcissa(rowno)[chan]}
613            qy = {'unit': self.get_fluxunit(),
614                  'value': self._getspectrum(rowno)[chan]}
615            return qx, qy
616
617    def stddev(self, mask=None):
618        """\
619        Determine the standard deviation of the current beam/if/pol
620        Takes a 'mask' as an optional parameter to specify which
621        channels should be excluded.
622
623        Parameters:
624
625            mask:    an optional mask specifying where the standard
626                     deviation should be determined.
627
628        Example::
629
630            scan.set_unit('channel')
631            msk = scan.create_mask([100, 200], [500, 600])
632            scan.stddev(mask=m)
633
634        """
635        return self.stats(stat='stddev', mask=mask);
636
637
638    def get_column_names(self):
639        """\
640        Return a  list of column names, which can be used for selection.
641        """
642        return list(Scantable.get_column_names(self))
643
644    def get_tsys(self, row=-1):
645        """\
646        Return the System temperatures.
647
648        Parameters:
649
650            row:    the rowno to get the information for. (default all rows)
651
652        Returns:
653
654            a list of Tsys values for the current selection
655
656        """
657        if row > -1:
658            return self._get_column(self._gettsys, row)
659        return self._row_callback(self._gettsys, "Tsys")
660
661
662    def get_weather(self, row=-1):
663        """\
664        Return the weather informations.
665
666        Parameters:
667
668            row:    the rowno to get the information for. (default all rows)
669
670        Returns:
671
672            a dict or list of of dicts of values for the current selection
673
674        """
675
676        values = self._get_column(self._get_weather, row)
677        if row > -1:
678            return {'temperature': values[0],
679                    'pressure': values[1], 'humidity' : values[2],
680                    'windspeed' : values[3], 'windaz' : values[4]
681                    }
682        else:
683            out = []
684            for r in values:
685
686                out.append({'temperature': r[0],
687                            'pressure': r[1], 'humidity' : r[2],
688                            'windspeed' : r[3], 'windaz' : r[4]
689                    })
690            return out
691
692    def _row_callback(self, callback, label):
693        out = ""
694        outvec = []
695        sep = '-'*50
696        for i in range(self.nrow()):
697            tm = self._gettime(i)
698            src = self._getsourcename(i)
699            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
700            out += 'Time[%s]:\n' % (tm)
701            if self.nbeam(-1) > 1:
702                out +=  ' Beam[%d] ' % (self.getbeam(i))
703            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
704            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
705            outvec.append(callback(i))
706            out += '= %3.3f\n' % (outvec[i])
707            out +=  sep+'\n'
708
709        asaplog.push(sep)
710        asaplog.push(" %s" % (label))
711        asaplog.push(sep)
712        asaplog.push(out)
713        asaplog.post()
714        return outvec
715
716    def _get_column(self, callback, row=-1):
717        """
718        """
719        if row == -1:
720            return [callback(i) for i in range(self.nrow())]
721        else:
722            if  0 <= row < self.nrow():
723                return callback(row)
724
725
726    def get_time(self, row=-1, asdatetime=False):
727        """\
728        Get a list of time stamps for the observations.
729        Return a datetime object for each integration time stamp in the scantable.
730
731        Parameters:
732
733            row:          row no of integration. Default -1 return all rows
734
735            asdatetime:   return values as datetime objects rather than strings
736
737        """
738        from time import strptime
739        from datetime import datetime
740        times = self._get_column(self._gettime, row)
741        if not asdatetime:
742            return times
743        format = "%Y/%m/%d/%H:%M:%S"
744        if isinstance(times, list):
745            return [datetime(*strptime(i, format)[:6]) for i in times]
746        else:
747            return datetime(*strptime(times, format)[:6])
748
749
750    def get_inttime(self, row=-1):
751        """\
752        Get a list of integration times for the observations.
753        Return a time in seconds for each integration in the scantable.
754
755        Parameters:
756
757            row:    row no of integration. Default -1 return all rows.
758
759        """
760        return self._get_column(self._getinttime, row)
761
762
763    def get_sourcename(self, row=-1):
764        """\
765        Get a list source names for the observations.
766        Return a string for each integration in the scantable.
767        Parameters:
768
769            row:    row no of integration. Default -1 return all rows.
770
771        """
772        return self._get_column(self._getsourcename, row)
773
774    def get_elevation(self, row=-1):
775        """\
776        Get a list of elevations for the observations.
777        Return a float for each integration in the scantable.
778
779        Parameters:
780
781            row:    row no of integration. Default -1 return all rows.
782
783        """
784        return self._get_column(self._getelevation, row)
785
786    def get_azimuth(self, row=-1):
787        """\
788        Get a list of azimuths for the observations.
789        Return a float for each integration in the scantable.
790
791        Parameters:
792            row:    row no of integration. Default -1 return all rows.
793
794        """
795        return self._get_column(self._getazimuth, row)
796
797    def get_parangle(self, row=-1):
798        """\
799        Get a list of parallactic angles for the observations.
800        Return a float for each integration in the scantable.
801
802        Parameters:
803
804            row:    row no of integration. Default -1 return all rows.
805
806        """
807        return self._get_column(self._getparangle, row)
808
809    def get_direction(self, row=-1):
810        """
811        Get a list of Positions on the sky (direction) for the observations.
812        Return a string for each integration in the scantable.
813
814        Parameters:
815
816            row:    row no of integration. Default -1 return all rows
817
818        """
819        return self._get_column(self._getdirection, row)
820
821    def get_directionval(self, row=-1):
822        """\
823        Get a list of Positions on the sky (direction) for the observations.
824        Return a float for each integration in the scantable.
825
826        Parameters:
827
828            row:    row no of integration. Default -1 return all rows
829
830        """
831        return self._get_column(self._getdirectionvec, row)
832
    @asaplog_post_dec
    def set_unit(self, unit='channel'):
        """\
        Set the unit for all following operations on this scantable

        Parameters:

            unit:    optional unit, default is 'channel'. Use one of '*Hz',
                     'km/s', 'channel' or equivalent ''

        """
        # capture the arguments for the history record
        varlist = vars()
        # 'pixel' and 'channel' are synonyms for the default (empty) unit
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        # slot 0 of the coordinate info tuple holds the axis unit
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)
851
852    @asaplog_post_dec
853    def set_instrument(self, instr):
854        """\
855        Set the instrument for subsequent processing.
856
857        Parameters:
858
859            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
860                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
861
862        """
863        self._setInstrument(instr)
864        self._add_history("set_instument", vars())
865
    @asaplog_post_dec
    def set_feedtype(self, feedtype):
        """\
        Overwrite the feed type, which might not be set correctly.

        Parameters:

            feedtype:     'linear' or 'circular'

        """
        # validation of the value is left to the C++ layer
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())
878
879    @asaplog_post_dec
880    def set_doppler(self, doppler='RADIO'):
881        """\
882        Set the doppler for all following operations on this scantable.
883
884        Parameters:
885
886            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
887
888        """
889        varlist = vars()
890        inf = list(self._getcoordinfo())
891        inf[2] = doppler
892        self._setcoordinfo(inf)
893        self._add_history("set_doppler", vars())
894
895    @asaplog_post_dec
896    def set_freqframe(self, frame=None):
897        """\
898        Set the frame type of the Spectral Axis.
899
900        Parameters:
901
902            frame:   an optional frame type, default 'LSRK'. Valid frames are:
903                     'TOPO', 'LSRD', 'LSRK', 'BARY',
904                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
905
906        Example::
907
908            scan.set_freqframe('BARY')
909
910        """
911        frame = frame or rcParams['scantable.freqframe']
912        varlist = vars()
913        # "REST" is not implemented in casacore
914        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
915        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
916        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
917                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
918
919        if frame in valid:
920            inf = list(self._getcoordinfo())
921            inf[1] = frame
922            self._setcoordinfo(inf)
923            self._add_history("set_freqframe", varlist)
924        else:
925            msg  = "Please specify a valid freq type. Valid types are:\n", valid
926            raise TypeError(msg)
927
928    @asaplog_post_dec
929    def set_dirframe(self, frame=""):
930        """\
931        Set the frame type of the Direction on the sky.
932
933        Parameters:
934
935            frame:   an optional frame type, default ''. Valid frames are:
936                     'J2000', 'B1950', 'GALACTIC'
937
938        Example:
939
940            scan.set_dirframe('GALACTIC')
941
942        """
943        varlist = vars()
944        Scantable.set_dirframe(self, frame)
945        self._add_history("set_dirframe", varlist)
946
947    def get_unit(self):
948        """\
949        Get the default unit set in this scantable
950
951        Returns:
952
953            A unit string
954
955        """
956        inf = self._getcoordinfo()
957        unit = inf[0]
958        if unit == '': unit = 'channel'
959        return unit
960
961    @asaplog_post_dec
962    def get_abcissa(self, rowno=0):
963        """\
964        Get the abcissa in the current coordinate setup for the currently
965        selected Beam/IF/Pol
966
967        Parameters:
968
969            rowno:    an optional row number in the scantable. Default is the
970                      first row, i.e. rowno=0
971
972        Returns:
973
974            The abcissa values and the format string (as a dictionary)
975
976        """
977        abc = self._getabcissa(rowno)
978        lbl = self._getabcissalabel(rowno)
979        return abc, lbl
980
981    @asaplog_post_dec
982    def flag(self, mask=None, unflag=False):
983        """\
984        Flag the selected data using an optional channel mask.
985
986        Parameters:
987
988            mask:   an optional channel mask, created with create_mask. Default
989                    (no mask) is all channels.
990
991            unflag:    if True, unflag the data
992
993        """
994        varlist = vars()
995        mask = mask or []
996        self._flag(mask, unflag)
997        self._add_history("flag", varlist)
998
999    @asaplog_post_dec
1000    def flag_row(self, rows=[], unflag=False):
1001        """\
1002        Flag the selected data in row-based manner.
1003
1004        Parameters:
1005
1006            rows:   list of row numbers to be flagged. Default is no row
1007                    (must be explicitly specified to execute row-based flagging).
1008
1009            unflag: if True, unflag the data.
1010
1011        """
1012        varlist = vars()
1013        self._flag_row(rows, unflag)
1014        self._add_history("flag_row", varlist)
1015
1016    @asaplog_post_dec
1017    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1018        """\
1019        Flag the selected data outside a specified range (in channel-base)
1020
1021        Parameters:
1022
1023            uthres:      upper threshold.
1024
1025            dthres:      lower threshold
1026
1027            clipoutside: True for flagging data outside the range [dthres:uthres].
1028                         False for glagging data inside the range.
1029
1030            unflag:      if True, unflag the data.
1031
1032        """
1033        varlist = vars()
1034        self._clip(uthres, dthres, clipoutside, unflag)
1035        self._add_history("clip", varlist)
1036
1037    @asaplog_post_dec
1038    def lag_flag(self, start, end, unit="MHz", insitu=None):
1039        """\
1040        Flag the data in 'lag' space by providing a frequency to remove.
1041        Flagged data in the scantable gets interpolated over the region.
1042        No taper is applied.
1043
1044        Parameters:
1045
1046            start:    the start frequency (really a period within the
1047                      bandwidth)  or period to remove
1048
1049            end:      the end frequency or period to remove
1050
1051            unit:     the frequency unit (default "MHz") or "" for
1052                      explicit lag channels
1053
1054        *Notes*:
1055
1056            It is recommended to flag edges of the band or strong
1057            signals beforehand.
1058
1059        """
1060        if insitu is None: insitu = rcParams['insitu']
1061        self._math._setinsitu(insitu)
1062        varlist = vars()
1063        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1064        if not (unit == "" or base.has_key(unit)):
1065            raise ValueError("%s is not a valid unit." % unit)
1066        if unit == "":
1067            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1068        else:
1069            s = scantable(self._math._lag_flag(self, start*base[unit],
1070                                               end*base[unit], "frequency"))
1071        s._add_history("lag_flag", varlist)
1072        if insitu:
1073            self._assign(s)
1074        else:
1075            return s
1076
1077    @asaplog_post_dec
1078    def create_mask(self, *args, **kwargs):
1079        """\
1080        Compute and return a mask based on [min, max] windows.
1081        The specified windows are to be INCLUDED, when the mask is
1082        applied.
1083
1084        Parameters:
1085
1086            [min, max], [min2, max2], ...
1087                Pairs of start/end points (inclusive)specifying the regions
1088                to be masked
1089
1090            invert:     optional argument. If specified as True,
1091                        return an inverted mask, i.e. the regions
1092                        specified are EXCLUDED
1093
1094            row:        create the mask using the specified row for
1095                        unit conversions, default is row=0
1096                        only necessary if frequency varies over rows.
1097
1098        Examples::
1099
1100            scan.set_unit('channel')
1101            # a)
1102            msk = scan.create_mask([400, 500], [800, 900])
1103            # masks everything outside 400 and 500
1104            # and 800 and 900 in the unit 'channel'
1105
1106            # b)
1107            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1108            # masks the regions between 400 and 500
1109            # and 800 and 900 in the unit 'channel'
1110
1111            # c)
1112            #mask only channel 400
1113            msk =  scan.create_mask([400])
1114
1115        """
1116        row = kwargs.get("row", 0)
1117        data = self._getabcissa(row)
1118        u = self._getcoordinfo()[0]
1119        if u == "":
1120            u = "channel"
1121        msg = "The current mask window unit is %s" % u
1122        i = self._check_ifs()
1123        if not i:
1124            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1125        asaplog.push(msg)
1126        n = self.nchan()
1127        msk = _n_bools(n, False)
1128        # test if args is a 'list' or a 'normal *args - UGLY!!!
1129
1130        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1131             and args or args[0]
1132        for window in ws:
1133            if len(window) == 1:
1134                window = [window[0], window[0]]
1135            if len(window) == 0 or  len(window) > 2:
1136                raise ValueError("A window needs to be defined as [start(, end)]")
1137            if window[0] > window[1]:
1138                tmp = window[0]
1139                window[0] = window[1]
1140                window[1] = tmp
1141            for i in range(n):
1142                if data[i] >= window[0] and data[i] <= window[1]:
1143                    msk[i] = True
1144        if kwargs.has_key('invert'):
1145            if kwargs.get('invert'):
1146                msk = mask_not(msk)
1147        return msk
1148
1149    def get_masklist(self, mask=None, row=0):
1150        """\
1151        Compute and return a list of mask windows, [min, max].
1152
1153        Parameters:
1154
1155            mask:       channel mask, created with create_mask.
1156
1157            row:        calcutate the masklist using the specified row
1158                        for unit conversions, default is row=0
1159                        only necessary if frequency varies over rows.
1160
1161        Returns:
1162
1163            [min, max], [min2, max2], ...
1164                Pairs of start/end points (inclusive)specifying
1165                the masked regions
1166
1167        """
1168        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1169            raise TypeError("The mask should be list or tuple.")
1170        if len(mask) < 2:
1171            raise TypeError("The mask elements should be > 1")
1172        if self.nchan() != len(mask):
1173            msg = "Number of channels in scantable != number of mask elements"
1174            raise TypeError(msg)
1175        data = self._getabcissa(row)
1176        u = self._getcoordinfo()[0]
1177        if u == "":
1178            u = "channel"
1179        msg = "The current mask window unit is %s" % u
1180        i = self._check_ifs()
1181        if not i:
1182            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1183        asaplog.push(msg)
1184        masklist=[]
1185        ist, ien = None, None
1186        ist, ien=self.get_mask_indices(mask)
1187        if ist is not None and ien is not None:
1188            for i in xrange(len(ist)):
1189                range=[data[ist[i]],data[ien[i]]]
1190                range.sort()
1191                masklist.append([range[0],range[1]])
1192        return masklist
1193
1194    def get_mask_indices(self, mask=None):
1195        """\
1196        Compute and Return lists of mask start indices and mask end indices.
1197
1198        Parameters:
1199
1200            mask:       channel mask, created with create_mask.
1201
1202        Returns:
1203
1204            List of mask start indices and that of mask end indices,
1205            i.e., [istart1,istart2,....], [iend1,iend2,....].
1206
1207        """
1208        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1209            raise TypeError("The mask should be list or tuple.")
1210        if len(mask) < 2:
1211            raise TypeError("The mask elements should be > 1")
1212        istart=[]
1213        iend=[]
1214        if mask[0]: istart.append(0)
1215        for i in range(len(mask)-1):
1216            if not mask[i] and mask[i+1]:
1217                istart.append(i+1)
1218            elif mask[i] and not mask[i+1]:
1219                iend.append(i)
1220        if mask[len(mask)-1]: iend.append(len(mask)-1)
1221        if len(istart) != len(iend):
1222            raise RuntimeError("Numbers of mask start != mask end.")
1223        for i in range(len(istart)):
1224            if istart[i] > iend[i]:
1225                raise RuntimeError("Mask start index > mask end index")
1226                break
1227        return istart,iend
1228
1229#    def get_restfreqs(self):
1230#        """
1231#        Get the restfrequency(s) stored in this scantable.
1232#        The return value(s) are always of unit 'Hz'
1233#        Parameters:
1234#            none
1235#        Returns:
1236#            a list of doubles
1237#        """
1238#        return list(self._getrestfreqs())
1239
1240    def get_restfreqs(self, ids=None):
1241        """\
1242        Get the restfrequency(s) stored in this scantable.
1243        The return value(s) are always of unit 'Hz'
1244
1245        Parameters:
1246
1247            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1248                 be retrieved
1249
1250        Returns:
1251
1252            dictionary containing ids and a list of doubles for each id
1253
1254        """
1255        if ids is None:
1256            rfreqs={}
1257            idlist = self.getmolnos()
1258            for i in idlist:
1259                rfreqs[i]=list(self._getrestfreqs(i))
1260            return rfreqs
1261        else:
1262            if type(ids)==list or type(ids)==tuple:
1263                rfreqs={}
1264                for i in ids:
1265                    rfreqs[i]=list(self._getrestfreqs(i))
1266                return rfreqs
1267            else:
1268                return list(self._getrestfreqs(ids))
1269            #return list(self._getrestfreqs(ids))
1270
1271    def set_restfreqs(self, freqs=None, unit='Hz'):
1272        """\
1273        Set or replace the restfrequency specified and
1274        If the 'freqs' argument holds a scalar,
1275        then that rest frequency will be applied to all the selected
1276        data.  If the 'freqs' argument holds
1277        a vector, then it MUST be of equal or smaller length than
1278        the number of IFs (and the available restfrequencies will be
1279        replaced by this vector).  In this case, *all* data have
1280        the restfrequency set per IF according
1281        to the corresponding value you give in the 'freqs' vector.
1282        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1283        IF 1 gets restfreq 2e9.
1284
1285        You can also specify the frequencies via a linecatalog.
1286
1287        Parameters:
1288
1289            freqs:   list of rest frequency values or string idenitfiers
1290
1291            unit:    unit for rest frequency (default 'Hz')
1292
1293
1294        Example::
1295
1296            # set the given restfrequency for the all currently selected IFs
1297            scan.set_restfreqs(freqs=1.4e9)
1298            # set restfrequencies for the n IFs  (n > 1) in the order of the
1299            # list, i.e
1300            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1301            # len(list_of_restfreqs) == nIF
1302            # for nIF == 1 the following will set multiple restfrequency for
1303            # that IF
1304            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1305            # set multiple restfrequencies per IF. as a list of lists where
1306            # the outer list has nIF elements, the inner s arbitrary
1307            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1308
1309       *Note*:
1310
1311            To do more sophisticate Restfrequency setting, e.g. on a
1312            source and IF basis, use scantable.set_selection() before using
1313            this function::
1314
1315                # provided your scantable is called scan
1316                selection = selector()
1317                selection.set_name("ORION*")
1318                selection.set_ifs([1])
1319                scan.set_selection(selection)
1320                scan.set_restfreqs(freqs=86.6e9)
1321
1322        """
1323        varlist = vars()
1324        from asap import linecatalog
1325        # simple  value
1326        if isinstance(freqs, int) or isinstance(freqs, float):
1327            self._setrestfreqs([freqs], [""], unit)
1328        # list of values
1329        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1330            # list values are scalars
1331            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1332                if len(freqs) == 1:
1333                    self._setrestfreqs(freqs, [""], unit)
1334                else:
1335                    # allow the 'old' mode of setting mulitple IFs
1336                    sel = selector()
1337                    savesel = self._getselection()
1338                    iflist = self.getifnos()
1339                    if len(freqs)>len(iflist):
1340                        raise ValueError("number of elements in list of list "
1341                                         "exeeds the current IF selections")
1342                    iflist = self.getifnos()
1343                    for i, fval in enumerate(freqs):
1344                        sel.set_ifs(iflist[i])
1345                        self._setselection(sel)
1346                        self._setrestfreqs([fval], [""], unit)
1347                    self._setselection(savesel)
1348
1349            # list values are dict, {'value'=, 'name'=)
1350            elif isinstance(freqs[-1], dict):
1351                values = []
1352                names = []
1353                for d in freqs:
1354                    values.append(d["value"])
1355                    names.append(d["name"])
1356                self._setrestfreqs(values, names, unit)
1357            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1358                sel = selector()
1359                savesel = self._getselection()
1360                iflist = self.getifnos()
1361                if len(freqs)>len(iflist):
1362                    raise ValueError("number of elements in list of list exeeds"
1363                                     " the current IF selections")
1364                for i, fval in enumerate(freqs):
1365                    sel.set_ifs(iflist[i])
1366                    self._setselection(sel)
1367                    self._setrestfreqs(fval, [""], unit)
1368                self._setselection(savesel)
1369        # freqs are to be taken from a linecatalog
1370        elif isinstance(freqs, linecatalog):
1371            sel = selector()
1372            savesel = self._getselection()
1373            for i in xrange(freqs.nrow()):
1374                sel.set_ifs(iflist[i])
1375                self._setselection(sel)
1376                self._setrestfreqs([freqs.get_frequency(i)],
1377                                   [freqs.get_name(i)], "MHz")
1378                # ensure that we are not iterating past nIF
1379                if i == self.nif()-1: break
1380            self._setselection(savesel)
1381        else:
1382            return
1383        self._add_history("set_restfreqs", varlist)
1384
1385    def shift_refpix(self, delta):
1386        """\
1387        Shift the reference pixel of the Spectra Coordinate by an
1388        integer amount.
1389
1390        Parameters:
1391
1392            delta:   the amount to shift by
1393
1394        *Note*:
1395
1396            Be careful using this with broadband data.
1397
1398        """
1399        Scantable.shift_refpix(self, delta)
1400
1401    @asaplog_post_dec
1402    def history(self, filename=None):
1403        """\
1404        Print the history. Optionally to a file.
1405
1406        Parameters:
1407
1408            filename:    The name  of the file to save the history to.
1409
1410        """
1411        hist = list(self._gethistory())
1412        out = "-"*80
1413        for h in hist:
1414            if h.startswith("---"):
1415                out = "\n".join([out, h])
1416            else:
1417                items = h.split("##")
1418                date = items[0]
1419                func = items[1]
1420                items = items[2:]
1421                out += "\n"+date+"\n"
1422                out += "Function: %s\n  Parameters:" % (func)
1423                for i in items:
1424                    s = i.split("=")
1425                    out += "\n   %s = %s" % (s[0], s[1])
1426                out = "\n".join([out, "-"*80])
1427        if filename is not None:
1428            if filename is "":
1429                filename = 'scantable_history.txt'
1430            import os
1431            filename = os.path.expandvars(os.path.expanduser(filename))
1432            if not os.path.isdir(filename):
1433                data = open(filename, 'w')
1434                data.write(out)
1435                data.close()
1436            else:
1437                msg = "Illegal file name '%s'." % (filename)
1438                raise IOError(msg)
1439        return page(out)
1440    #
1441    # Maths business
1442    #
1443    @asaplog_post_dec
1444    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1445        """\
1446        Return the (time) weighted average of a scan.
1447
1448        *Note*:
1449
1450            in channels only - align if necessary
1451
1452        Parameters:
1453
1454            mask:     an optional mask (only used for 'var' and 'tsys'
1455                      weighting)
1456
1457            scanav:   True averages each scan separately
1458                      False (default) averages all scans together,
1459
1460            weight:   Weighting scheme.
1461                      'none'     (mean no weight)
1462                      'var'      (1/var(spec) weighted)
1463                      'tsys'     (1/Tsys**2 weighted)
1464                      'tint'     (integration time weighted)
1465                      'tintsys'  (Tint/Tsys**2)
1466                      'median'   ( median averaging)
1467                      The default is 'tint'
1468
1469            align:    align the spectra in velocity before averaging. It takes
1470                      the time of the first spectrum as reference time.
1471
1472        Example::
1473
1474            # time average the scantable without using a mask
1475            newscan = scan.average_time()
1476
1477        """
1478        varlist = vars()
1479        weight = weight or 'TINT'
1480        mask = mask or ()
1481        scanav = (scanav and 'SCAN') or 'NONE'
1482        scan = (self, )
1483
1484        if align:
1485            scan = (self.freq_align(insitu=False), )
1486        s = None
1487        if weight.upper() == 'MEDIAN':
1488            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1489                                                     scanav))
1490        else:
1491            s = scantable(self._math._average(scan, mask, weight.upper(),
1492                          scanav))
1493        s._add_history("average_time", varlist)
1494        return s
1495
1496    @asaplog_post_dec
1497    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1498        """\
1499        Return a scan where all spectra are converted to either
1500        Jansky or Kelvin depending upon the flux units of the scan table.
1501        By default the function tries to look the values up internally.
1502        If it can't find them (or if you want to over-ride), you must
1503        specify EITHER jyperk OR eta (and D which it will try to look up
1504        also if you don't set it). jyperk takes precedence if you set both.
1505
1506        Parameters:
1507
1508            jyperk:      the Jy / K conversion factor
1509
1510            eta:         the aperture efficiency
1511
1512            d:           the geomtric diameter (metres)
1513
1514            insitu:      if False a new scantable is returned.
1515                         Otherwise, the scaling is done in-situ
1516                         The default is taken from .asaprc (False)
1517
1518        """
1519        if insitu is None: insitu = rcParams['insitu']
1520        self._math._setinsitu(insitu)
1521        varlist = vars()
1522        jyperk = jyperk or -1.0
1523        d = d or -1.0
1524        eta = eta or -1.0
1525        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1526        s._add_history("convert_flux", varlist)
1527        if insitu: self._assign(s)
1528        else: return s
1529
1530    @asaplog_post_dec
1531    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1532        """\
1533        Return a scan after applying a gain-elevation correction.
1534        The correction can be made via either a polynomial or a
1535        table-based interpolation (and extrapolation if necessary).
1536        You specify polynomial coefficients, an ascii table or neither.
1537        If you specify neither, then a polynomial correction will be made
1538        with built in coefficients known for certain telescopes (an error
1539        will occur if the instrument is not known).
1540        The data and Tsys are *divided* by the scaling factors.
1541
1542        Parameters:
1543
1544            poly:        Polynomial coefficients (default None) to compute a
1545                         gain-elevation correction as a function of
1546                         elevation (in degrees).
1547
1548            filename:    The name of an ascii file holding correction factors.
1549                         The first row of the ascii file must give the column
1550                         names and these MUST include columns
1551                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1552                         by this) somewhere.
1553                         The second row must give the data type of the
1554                         column. Use 'R' for Real and 'I' for Integer.
1555                         An example file would be
1556                         (actual factors are arbitrary) :
1557
1558                         TIME ELEVATION FACTOR
1559                         R R R
1560                         0.1 0 0.8
1561                         0.2 20 0.85
1562                         0.3 40 0.9
1563                         0.4 60 0.85
1564                         0.5 80 0.8
1565                         0.6 90 0.75
1566
1567            method:      Interpolation method when correcting from a table.
1568                         Values are  "nearest", "linear" (default), "cubic"
1569                         and "spline"
1570
1571            insitu:      if False a new scantable is returned.
1572                         Otherwise, the scaling is done in-situ
1573                         The default is taken from .asaprc (False)
1574
1575        """
1576
1577        if insitu is None: insitu = rcParams['insitu']
1578        self._math._setinsitu(insitu)
1579        varlist = vars()
1580        poly = poly or ()
1581        from os.path import expandvars
1582        filename = expandvars(filename)
1583        s = scantable(self._math._gainel(self, poly, filename, method))
1584        s._add_history("gain_el", varlist)
1585        if insitu:
1586            self._assign(s)
1587        else:
1588            return s
1589
1590    @asaplog_post_dec
1591    def freq_align(self, reftime=None, method='cubic', insitu=None):
1592        """\
1593        Return a scan where all rows have been aligned in frequency/velocity.
1594        The alignment frequency frame (e.g. LSRK) is that set by function
1595        set_freqframe.
1596
1597        Parameters:
1598
1599            reftime:     reference time to align at. By default, the time of
1600                         the first row of data is used.
1601
1602            method:      Interpolation method for regridding the spectra.
1603                         Choose from "nearest", "linear", "cubic" (default)
1604                         and "spline"
1605
1606            insitu:      if False a new scantable is returned.
1607                         Otherwise, the scaling is done in-situ
1608                         The default is taken from .asaprc (False)
1609
1610        """
1611        if insitu is None: insitu = rcParams["insitu"]
1612        self._math._setinsitu(insitu)
1613        varlist = vars()
1614        reftime = reftime or ""
1615        s = scantable(self._math._freq_align(self, reftime, method))
1616        s._add_history("freq_align", varlist)
1617        if insitu: self._assign(s)
1618        else: return s
1619
1620    @asaplog_post_dec
1621    def opacity(self, tau=None, insitu=None):
1622        """\
1623        Apply an opacity correction. The data
1624        and Tsys are multiplied by the correction factor.
1625
1626        Parameters:
1627
1628            tau:         (list of) opacity from which the correction factor is
1629                         exp(tau*ZD)
1630                         where ZD is the zenith-distance.
1631                         If a list is provided, it has to be of length nIF,
1632                         nIF*nPol or 1 and in order of IF/POL, e.g.
1633                         [opif0pol0, opif0pol1, opif1pol0 ...]
1634                         if tau is `None` the opacities are determined from a
1635                         model.
1636
1637            insitu:      if False a new scantable is returned.
1638                         Otherwise, the scaling is done in-situ
1639                         The default is taken from .asaprc (False)
1640
1641        """
1642        if insitu is None: insitu = rcParams['insitu']
1643        self._math._setinsitu(insitu)
1644        varlist = vars()
1645        if not hasattr(tau, "__len__"):
1646            tau = [tau]
1647        s = scantable(self._math._opacity(self, tau))
1648        s._add_history("opacity", varlist)
1649        if insitu: self._assign(s)
1650        else: return s
1651
1652    @asaplog_post_dec
1653    def bin(self, width=5, insitu=None):
1654        """\
1655        Return a scan where all spectra have been binned up.
1656
1657        Parameters:
1658
1659            width:       The bin width (default=5) in pixels
1660
1661            insitu:      if False a new scantable is returned.
1662                         Otherwise, the scaling is done in-situ
1663                         The default is taken from .asaprc (False)
1664
1665        """
1666        if insitu is None: insitu = rcParams['insitu']
1667        self._math._setinsitu(insitu)
1668        varlist = vars()
1669        s = scantable(self._math._bin(self, width))
1670        s._add_history("bin", varlist)
1671        if insitu:
1672            self._assign(s)
1673        else:
1674            return s
1675
1676    @asaplog_post_dec
1677    def resample(self, width=5, method='cubic', insitu=None):
1678        """\
1679        Return a scan where all spectra have been binned up.
1680
1681        Parameters:
1682
1683            width:       The bin width (default=5) in pixels
1684
1685            method:      Interpolation method when correcting from a table.
1686                         Values are  "nearest", "linear", "cubic" (default)
1687                         and "spline"
1688
1689            insitu:      if False a new scantable is returned.
1690                         Otherwise, the scaling is done in-situ
1691                         The default is taken from .asaprc (False)
1692
1693        """
1694        if insitu is None: insitu = rcParams['insitu']
1695        self._math._setinsitu(insitu)
1696        varlist = vars()
1697        s = scantable(self._math._resample(self, method, width))
1698        s._add_history("resample", varlist)
1699        if insitu: self._assign(s)
1700        else: return s
1701
1702    @asaplog_post_dec
1703    def average_pol(self, mask=None, weight='none'):
1704        """\
1705        Average the Polarisations together.
1706
1707        Parameters:
1708
1709            mask:        An optional mask defining the region, where the
1710                         averaging will be applied. The output will have all
1711                         specified points masked.
1712
1713            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1714                         weighted), or 'tsys' (1/Tsys**2 weighted)
1715
1716        """
1717        varlist = vars()
1718        mask = mask or ()
1719        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1720        s._add_history("average_pol", varlist)
1721        return s
1722
1723    @asaplog_post_dec
1724    def average_beam(self, mask=None, weight='none'):
1725        """\
1726        Average the Beams together.
1727
1728        Parameters:
1729            mask:        An optional mask defining the region, where the
1730                         averaging will be applied. The output will have all
1731                         specified points masked.
1732
1733            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1734                         weighted), or 'tsys' (1/Tsys**2 weighted)
1735
1736        """
1737        varlist = vars()
1738        mask = mask or ()
1739        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1740        s._add_history("average_beam", varlist)
1741        return s
1742
1743    def parallactify(self, pflag):
1744        """\
1745        Set a flag to indicate whether this data should be treated as having
1746        been 'parallactified' (total phase == 0.0)
1747
1748        Parameters:
1749
1750            pflag:  Bool indicating whether to turn this on (True) or
1751                    off (False)
1752
1753        """
1754        varlist = vars()
1755        self._parallactify(pflag)
1756        self._add_history("parallactify", varlist)
1757
1758    @asaplog_post_dec
1759    def convert_pol(self, poltype=None):
1760        """\
1761        Convert the data to a different polarisation type.
1762        Note that you will need cross-polarisation terms for most conversions.
1763
1764        Parameters:
1765
1766            poltype:    The new polarisation type. Valid types are:
1767                        "linear", "circular", "stokes" and "linpol"
1768
1769        """
1770        varlist = vars()
1771        s = scantable(self._math._convertpol(self, poltype))
1772        s._add_history("convert_pol", varlist)
1773        return s
1774
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defauls to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polnomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each indivual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep an unsmoothed copy so each row can be shown alongside the
        # smoothed result and restored if the user rejects the smoothing
        if plot: orgscan = self.copy()

        # the smoothing itself is done by the C++ math layer
        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # pick the GUI or non-GUI plotting backend according to .asaprc
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            # overlay original and smoothed spectra row by row and ask the
            # user to accept or reject each smoothing result interactively
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: put the original spectrum back for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
1852
    @asaplog_post_dec
    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
        """\
        Return a scan which has been baselined (all rows) by a polynomial.

        Parameters:

            mask:       an optional mask

            order:      the order of the polynomial (default is 0)

            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'

            uselin:     use linear polynomial fit

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

            rows:       row numbers of spectra to be processed.
                        (default is None: for all rows)

        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)

        """
        if insitu is None: insitu = rcParams['insitu']
        if not insitu:
            workscan = self.copy()
        else:
            workscan = self
        varlist = vars()
        if mask is None:
            # default mask: use every channel
            mask = [True for i in xrange(self.nchan())]

        try:
            f = fitter()
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)

            if rows == None:
                rows = xrange(workscan.nrow())
            elif isinstance(rows, int):
                rows = [ rows ]

            # Fit parameters/masks are kept as class attributes.
            # NOTICE: they reflect the state at fit time only and do not
            # track later changes to the scantable.
            if len(rows) > 0:
                self.blpars = []
                self.masklists = []
                self.actualmask = []

            for r in rows:
                f.x = workscan._getabcissa(r)
                f.y = workscan._getspectrum(r)
                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
                f.data = None
                f.fit()
                if plot:
                    f.plot(residual=True)
                    x = raw_input("Accept fit ( [y]/n ): ")
                    if x.upper() == 'N':
                        # rejected fit: keep None placeholders so the result
                        # lists stay aligned with the processed row order
                        self.blpars.append(None)
                        self.masklists.append(None)
                        self.actualmask.append(None)
                        continue
                # replace the spectrum with the fit residual (baseline removed)
                workscan._setspectrum(f.fitter.getresidual(), r)
                self.blpars.append(f.get_parameters())
                self.masklists.append(workscan.get_masklist(f.mask, row=r))
                self.actualmask.append(f.mask)

            if plot:
                f._p.unmap()
                f._p = None
            # history entry deliberately uses the "poly_baseline" label
            workscan._add_history("poly_baseline", varlist)
            if insitu:
                self._assign(workscan)
            else:
                return workscan
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            raise RuntimeError(msg)
1939
1940
1941    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1942        """\
1943        Return a scan which has been baselined (all rows) by a polynomial.
1944        Parameters:
1945            mask:       an optional mask
1946            order:      the order of the polynomial (default is 0)
1947            plot:       plot the fit and the residual. In this each
1948                        indivual fit has to be approved, by typing 'y'
1949                        or 'n'. Ignored if batch = True.
1950            batch:      if True a faster algorithm is used and logs
1951                        including the fit results are not output
1952                        (default is False)
1953            insitu:     if False a new scantable is returned.
1954                        Otherwise, the scaling is done in-situ
1955                        The default is taken from .asaprc (False)
1956            rows:       row numbers of spectra to be processed.
1957                        (default is None: for all rows)
1958        Example:
1959            # return a scan baselined by a third order polynomial,
1960            # not using a mask
1961            bscan = scan.poly_baseline(order=3)
1962        """
1963        if insitu is None: insitu = rcParams["insitu"]
1964        if insitu:
1965            workscan = self
1966        else:
1967            workscan = self.copy()
1968
1969        varlist = vars()
1970        nchan = workscan.nchan()
1971       
1972        if mask is None:
1973            mask = [True for i in xrange(nchan)]
1974
1975        try:
1976            if rows == None:
1977                rows = xrange(workscan.nrow())
1978            elif isinstance(rows, int):
1979                rows = [ rows ]
1980           
1981            if len(rows) > 0:
1982                self.blpars = []
1983                self.masklists = []
1984                self.actualmask = []
1985
1986            if batch:
1987                for r in rows:
1988                    workscan._poly_baseline_batch(mask, order, r)
1989            elif plot:
1990                f = fitter()
1991                f.set_function(lpoly=order)
1992                for r in rows:
1993                    f.x = workscan._getabcissa(r)
1994                    f.y = workscan._getspectrum(r)
1995                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1996                    f.data = None
1997                    f.fit()
1998                   
1999                    f.plot(residual=True)
2000                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2001                    if accept_fit.upper() == "N":
2002                        self.blpars.append(None)
2003                        self.masklists.append(None)
2004                        self.actualmask.append(None)
2005                        continue
2006                    workscan._setspectrum(f.fitter.getresidual(), r)
2007                    self.blpars.append(f.get_parameters())
2008                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
2009                    self.actualmask.append(f.mask)
2010                   
2011                f._p.unmap()
2012                f._p = None
2013            else:
2014                import array
2015                for r in rows:
2016                    pars = array.array("f", [0.0 for i in range(order+1)])
2017                    pars_adr = pars.buffer_info()[0]
2018                    pars_len = pars.buffer_info()[1]
2019                   
2020                    errs = array.array("f", [0.0 for i in range(order+1)])
2021                    errs_adr = errs.buffer_info()[0]
2022                    errs_len = errs.buffer_info()[1]
2023                   
2024                    fmsk = array.array("i", [1 for i in range(nchan)])
2025                    fmsk_adr = fmsk.buffer_info()[0]
2026                    fmsk_len = fmsk.buffer_info()[1]
2027                   
2028                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)
2029                   
2030                    params = pars.tolist()
2031                    fmtd = ""
2032                    for i in xrange(len(params)): fmtd += "  p%d= %3.6f," % (i, params[i])
2033                    fmtd = fmtd[:-1]  # remove trailing ","
2034                    errors = errs.tolist()
2035                    fmask = fmsk.tolist()
2036                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0)    # transform (1/0) -> (True/False)
2037                   
2038                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
2039                    self.masklists.append(workscan.get_masklist(fmask, r))
2040                    self.actualmask.append(fmask)
2041                   
2042                    asaplog.push(str(fmtd))
2043           
2044            workscan._add_history("poly_baseline", varlist)
2045            print_log()
2046           
2047            if insitu:
2048                self._assign(workscan)
2049            else:
2050                return workscan
2051           
2052        except RuntimeError:
2053            msg = "The fit failed, possibly because it didn't converge."
2054            if rcParams["verbose"]:
2055                print_log()
2056                asaplog.push(str(msg))
2057                print_log("ERROR")
2058                return
2059            else:
2060                raise RuntimeError(msg)
2061
2062
2063    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2064                           threshold=3, chan_avg_limit=1, plot=False,
2065                           insitu=None, rows=None):
2066        """\
2067        Return a scan which has been baselined (all rows) by a polynomial.
2068        Spectral lines are detected first using linefinder and masked out
2069        to avoid them affecting the baseline solution.
2070
2071        Parameters:
2072
2073            mask:       an optional mask retreived from scantable
2074
2075            edge:       an optional number of channel to drop at the edge of
2076                        spectrum. If only one value is
2077                        specified, the same number will be dropped from
2078                        both sides of the spectrum. Default is to keep
2079                        all channels. Nested tuples represent individual
2080                        edge selection for different IFs (a number of spectral
2081                        channels can be different)
2082
2083            order:      the order of the polynomial (default is 0)
2084
2085            threshold:  the threshold used by line finder. It is better to
2086                        keep it large as only strong lines affect the
2087                        baseline solution.
2088
2089            chan_avg_limit:
2090                        a maximum number of consequtive spectral channels to
2091                        average during the search of weak and broad lines.
2092                        The default is no averaging (and no search for weak
2093                        lines). If such lines can affect the fitted baseline
2094                        (e.g. a high order polynomial is fitted), increase this
2095                        parameter (usually values up to 8 are reasonable). Most
2096                        users of this method should find the default value
2097                        sufficient.
2098
2099            plot:       plot the fit and the residual. In this each
2100                        indivual fit has to be approved, by typing 'y'
2101                        or 'n'
2102
2103            insitu:     if False a new scantable is returned.
2104                        Otherwise, the scaling is done in-situ
2105                        The default is taken from .asaprc (False)
2106            rows:       row numbers of spectra to be processed.
2107                        (default is None: for all rows)
2108
2109
2110        Example::
2111
2112            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2113
2114        """
2115        if insitu is None: insitu = rcParams['insitu']
2116        varlist = vars()
2117        from asap.asaplinefind import linefinder
2118        from asap import _is_sequence_or_number as _is_valid
2119
2120        # check whether edge is set up for each IF individually
2121        individualedge = False;
2122        if len(edge) > 1:
2123            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2124                individualedge = True;
2125
2126        if not _is_valid(edge, int) and not individualedge:
2127            raise ValueError, "Parameter 'edge' has to be an integer or a \
2128            pair of integers specified as a tuple. Nested tuples are allowed \
2129            to make individual selection for different IFs."
2130
2131        curedge = (0, 0)
2132        if individualedge:
2133            for edgepar in edge:
2134                if not _is_valid(edgepar, int):
2135                    raise ValueError, "Each element of the 'edge' tuple has \
2136                                       to be a pair of integers or an integer."
2137        else:
2138            curedge = edge;
2139
2140        if not insitu:
2141            workscan = self.copy()
2142        else:
2143            workscan = self
2144
2145        # setup fitter
2146        f = fitter()
2147        f.set_function(lpoly=order)
2148
2149        # setup line finder
2150        fl = linefinder()
2151        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2152
2153        fl.set_scan(workscan)
2154
2155        if mask is None:
2156            mask = _n_bools(workscan.nchan(), True)
2157       
2158        if rows is None:
2159            rows = xrange(workscan.nrow())
2160        elif isinstance(rows, int):
2161            rows = [ rows ]
2162       
2163        # Save parameters of baseline fits & masklists as a class attribute.
2164        # NOTICE: It does not reflect changes in scantable!
2165        if len(rows) > 0:
2166            self.blpars=[]
2167            self.masklists=[]
2168            self.actualmask=[]
2169        asaplog.push("Processing:")
2170        for r in rows:
2171            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2172                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2173                 workscan.getpol(r), workscan.getcycle(r))
2174            asaplog.push(msg, False)
2175
2176            # figure out edge parameter
2177            if individualedge:
2178                if len(edge) >= workscan.getif(r):
2179                    raise RuntimeError, "Number of edge elements appear to " \
2180                                        "be less than the number of IFs"
2181                    curedge = edge[workscan.getif(r)]
2182
2183            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2184
2185            # setup line finder
2186            fl.find_lines(r, actualmask, curedge)
2187           
2188            f.x = workscan._getabcissa(r)
2189            f.y = workscan._getspectrum(r)
2190            f.mask = fl.get_mask()
2191            f.data = None
2192            f.fit()
2193
2194            # Show mask list
2195            masklist=workscan.get_masklist(f.mask, row=r)
2196            msg = "mask range: "+str(masklist)
2197            asaplog.push(msg, False)
2198
2199            if plot:
2200                f.plot(residual=True)
2201                x = raw_input("Accept fit ( [y]/n ): ")
2202                if x.upper() == 'N':
2203                    self.blpars.append(None)
2204                    self.masklists.append(None)
2205                    self.actualmask.append(None)
2206                    continue
2207
2208            workscan._setspectrum(f.fitter.getresidual(), r)
2209            self.blpars.append(f.get_parameters())
2210            self.masklists.append(masklist)
2211            self.actualmask.append(f.mask)
2212        if plot:
2213            f._p.unmap()
2214            f._p = None
2215        workscan._add_history("auto_poly_baseline", varlist)
2216        if insitu:
2217            self._assign(workscan)
2218        else:
2219            return workscan
2220
2221    @asaplog_post_dec
2222    def rotate_linpolphase(self, angle):
2223        """\
2224        Rotate the phase of the complex polarization O=Q+iU correlation.
2225        This is always done in situ in the raw data.  So if you call this
2226        function more than once then each call rotates the phase further.
2227
2228        Parameters:
2229
2230            angle:   The angle (degrees) to rotate (add) by.
2231
2232        Example::
2233
2234            scan.rotate_linpolphase(2.3)
2235
2236        """
2237        varlist = vars()
2238        self._math._rotate_linpolphase(self, angle)
2239        self._add_history("rotate_linpolphase", varlist)
2240        return
2241
2242    @asaplog_post_dec
2243    def rotate_xyphase(self, angle):
2244        """\
2245        Rotate the phase of the XY correlation.  This is always done in situ
2246        in the data.  So if you call this function more than once
2247        then each call rotates the phase further.
2248
2249        Parameters:
2250
2251            angle:   The angle (degrees) to rotate (add) by.
2252
2253        Example::
2254
2255            scan.rotate_xyphase(2.3)
2256
2257        """
2258        varlist = vars()
2259        self._math._rotate_xyphase(self, angle)
2260        self._add_history("rotate_xyphase", varlist)
2261        return
2262
2263    @asaplog_post_dec
2264    def swap_linears(self):
2265        """\
2266        Swap the linear polarisations XX and YY, or better the first two
2267        polarisations as this also works for ciculars.
2268        """
2269        varlist = vars()
2270        self._math._swap_linears(self)
2271        self._add_history("swap_linears", varlist)
2272        return
2273
2274    @asaplog_post_dec
2275    def invert_phase(self):
2276        """\
2277        Invert the phase of the complex polarisation
2278        """
2279        varlist = vars()
2280        self._math._invert_phase(self)
2281        self._add_history("invert_phase", varlist)
2282        return
2283
2284    @asaplog_post_dec
2285    def add(self, offset, insitu=None):
2286        """\
2287        Return a scan where all spectra have the offset added
2288
2289        Parameters:
2290
2291            offset:      the offset
2292
2293            insitu:      if False a new scantable is returned.
2294                         Otherwise, the scaling is done in-situ
2295                         The default is taken from .asaprc (False)
2296
2297        """
2298        if insitu is None: insitu = rcParams['insitu']
2299        self._math._setinsitu(insitu)
2300        varlist = vars()
2301        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2302        s._add_history("add", varlist)
2303        if insitu:
2304            self._assign(s)
2305        else:
2306            return s
2307
2308    @asaplog_post_dec
2309    def scale(self, factor, tsys=True, insitu=None):
2310        """\
2311
2312        Return a scan where all spectra are scaled by the give 'factor'
2313
2314        Parameters:
2315
2316            factor:      the scaling factor (float or 1D float list)
2317
2318            insitu:      if False a new scantable is returned.
2319                         Otherwise, the scaling is done in-situ
2320                         The default is taken from .asaprc (False)
2321
2322            tsys:        if True (default) then apply the operation to Tsys
2323                         as well as the data
2324
2325        """
2326        if insitu is None: insitu = rcParams['insitu']
2327        self._math._setinsitu(insitu)
2328        varlist = vars()
2329        s = None
2330        import numpy
2331        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2332            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2333                from asapmath import _array2dOp
2334                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2335            else:
2336                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2337        else:
2338            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2339        s._add_history("scale", varlist)
2340        if insitu:
2341            self._assign(s)
2342        else:
2343            return s
2344
2345    def set_sourcetype(self, match, matchtype="pattern",
2346                       sourcetype="reference"):
2347        """\
2348        Set the type of the source to be an source or reference scan
2349        using the provided pattern.
2350
2351        Parameters:
2352
2353            match:          a Unix style pattern, regular expression or selector
2354
2355            matchtype:      'pattern' (default) UNIX style pattern or
2356                            'regex' regular expression
2357
2358            sourcetype:     the type of the source to use (source/reference)
2359
2360        """
2361        varlist = vars()
2362        basesel = self.get_selection()
2363        stype = -1
2364        if sourcetype.lower().startswith("r"):
2365            stype = 1
2366        elif sourcetype.lower().startswith("s"):
2367            stype = 0
2368        else:
2369            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2370        if matchtype.lower().startswith("p"):
2371            matchtype = "pattern"
2372        elif matchtype.lower().startswith("r"):
2373            matchtype = "regex"
2374        else:
2375            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2376        sel = selector()
2377        if isinstance(match, selector):
2378            sel = match
2379        else:
2380            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2381        self.set_selection(basesel+sel)
2382        self._setsourcetype(stype)
2383        self.set_selection(basesel)
2384        self._add_history("set_sourcetype", varlist)
2385
2386    @asaplog_post_dec
2387    @preserve_selection
2388    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2389        """\
2390        This function allows to build quotients automatically.
2391        It assumes the observation to have the same number of
2392        "ons" and "offs"
2393
2394        Parameters:
2395
2396            preserve:       you can preserve (default) the continuum or
2397                            remove it.  The equations used are
2398
2399                            preserve: Output = Toff * (on/off) - Toff
2400
2401                            remove:   Output = Toff * (on/off) - Ton
2402
2403            mode:           the on/off detection mode
2404                            'paired' (default)
2405                            identifies 'off' scans by the
2406                            trailing '_R' (Mopra/Parkes) or
2407                            '_e'/'_w' (Tid) and matches
2408                            on/off pairs from the observing pattern
2409                            'time'
2410                            finds the closest off in time
2411
2412        .. todo:: verify argument is not implemented
2413
2414        """
2415        varlist = vars()
2416        modes = ["time", "paired"]
2417        if not mode in modes:
2418            msg = "please provide valid mode. Valid modes are %s" % (modes)
2419            raise ValueError(msg)
2420        s = None
2421        if mode.lower() == "paired":
2422            sel = self.get_selection()
2423            sel.set_query("SRCTYPE==psoff")
2424            self.set_selection(sel)
2425            offs = self.copy()
2426            sel.set_query("SRCTYPE==pson")
2427            self.set_selection(sel)
2428            ons = self.copy()
2429            s = scantable(self._math._quotient(ons, offs, preserve))
2430        elif mode.lower() == "time":
2431            s = scantable(self._math._auto_quotient(self, mode, preserve))
2432        s._add_history("auto_quotient", varlist)
2433        return s
2434
2435    @asaplog_post_dec
2436    def mx_quotient(self, mask = None, weight='median', preserve=True):
2437        """\
2438        Form a quotient using "off" beams when observing in "MX" mode.
2439
2440        Parameters:
2441
2442            mask:           an optional mask to be used when weight == 'stddev'
2443
2444            weight:         How to average the off beams.  Default is 'median'.
2445
2446            preserve:       you can preserve (default) the continuum or
2447                            remove it.  The equations used are:
2448
2449                                preserve: Output = Toff * (on/off) - Toff
2450
2451                                remove:   Output = Toff * (on/off) - Ton
2452
2453        """
2454        mask = mask or ()
2455        varlist = vars()
2456        on = scantable(self._math._mx_extract(self, 'on'))
2457        preoff = scantable(self._math._mx_extract(self, 'off'))
2458        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2459        from asapmath  import quotient
2460        q = quotient(on, off, preserve)
2461        q._add_history("mx_quotient", varlist)
2462        return q
2463
2464    @asaplog_post_dec
2465    def freq_switch(self, insitu=None):
2466        """\
2467        Apply frequency switching to the data.
2468
2469        Parameters:
2470
2471            insitu:      if False a new scantable is returned.
2472                         Otherwise, the swictching is done in-situ
2473                         The default is taken from .asaprc (False)
2474
2475        """
2476        if insitu is None: insitu = rcParams['insitu']
2477        self._math._setinsitu(insitu)
2478        varlist = vars()
2479        s = scantable(self._math._freqswitch(self))
2480        s._add_history("freq_switch", varlist)
2481        if insitu:
2482            self._assign(s)
2483        else:
2484            return s
2485
2486    @asaplog_post_dec
2487    def recalc_azel(self):
2488        """Recalculate the azimuth and elevation for each position."""
2489        varlist = vars()
2490        self._recalcazel()
2491        self._add_history("recalc_azel", varlist)
2492        return
2493
2494    @asaplog_post_dec
2495    def __add__(self, other):
2496        varlist = vars()
2497        s = None
2498        if isinstance(other, scantable):
2499            s = scantable(self._math._binaryop(self, other, "ADD"))
2500        elif isinstance(other, float):
2501            s = scantable(self._math._unaryop(self, other, "ADD", False))
2502        else:
2503            raise TypeError("Other input is not a scantable or float value")
2504        s._add_history("operator +", varlist)
2505        return s
2506
2507    @asaplog_post_dec
2508    def __sub__(self, other):
2509        """
2510        implicit on all axes and on Tsys
2511        """
2512        varlist = vars()
2513        s = None
2514        if isinstance(other, scantable):
2515            s = scantable(self._math._binaryop(self, other, "SUB"))
2516        elif isinstance(other, float):
2517            s = scantable(self._math._unaryop(self, other, "SUB", False))
2518        else:
2519            raise TypeError("Other input is not a scantable or float value")
2520        s._add_history("operator -", varlist)
2521        return s
2522
2523    @asaplog_post_dec
2524    def __mul__(self, other):
2525        """
2526        implicit on all axes and on Tsys
2527        """
2528        varlist = vars()
2529        s = None
2530        if isinstance(other, scantable):
2531            s = scantable(self._math._binaryop(self, other, "MUL"))
2532        elif isinstance(other, float):
2533            s = scantable(self._math._unaryop(self, other, "MUL", False))
2534        else:
2535            raise TypeError("Other input is not a scantable or float value")
2536        s._add_history("operator *", varlist)
2537        return s
2538
2539
2540    @asaplog_post_dec
2541    def __div__(self, other):
2542        """
2543        implicit on all axes and on Tsys
2544        """
2545        varlist = vars()
2546        s = None
2547        if isinstance(other, scantable):
2548            s = scantable(self._math._binaryop(self, other, "DIV"))
2549        elif isinstance(other, float):
2550            if other == 0.0:
2551                raise ZeroDivisionError("Dividing by zero is not recommended")
2552            s = scantable(self._math._unaryop(self, other, "DIV", False))
2553        else:
2554            raise TypeError("Other input is not a scantable or float value")
2555        s._add_history("operator /", varlist)
2556        return s
2557
2558    @asaplog_post_dec
2559    def get_fit(self, row=0):
2560        """\
2561        Print or return the stored fits for a row in the scantable
2562
2563        Parameters:
2564
2565            row:    the row which the fit has been applied to.
2566
2567        """
2568        if row > self.nrow():
2569            return
2570        from asap.asapfit import asapfit
2571        fit = asapfit(self._getfit(row))
2572        asaplog.push( '%s' %(fit) )
2573        return fit.as_dict()
2574
2575    def flag_nans(self):
2576        """\
2577        Utility function to flag NaN values in the scantable.
2578        """
2579        import numpy
2580        basesel = self.get_selection()
2581        for i in range(self.nrow()):
2582            sel = self.get_row_selector(i)
2583            self.set_selection(basesel+sel)
2584            nans = numpy.isnan(self._getspectrum(0))
2585        if numpy.any(nans):
2586            bnans = [ bool(v) for v in nans]
2587            self.flag(bnans)
2588        self.set_selection(basesel)
2589
2590    def get_row_selector(self, rowno):
2591        return selector(beams=self.getbeam(rowno),
2592                        ifs=self.getif(rowno),
2593                        pols=self.getpol(rowno),
2594                        scans=self.getscan(rowno),
2595                        cycles=self.getcycle(rowno))
2596
2597    def _add_history(self, funcname, parameters):
2598        if not rcParams['scantable.history']:
2599            return
2600        # create date
2601        sep = "##"
2602        from datetime import datetime
2603        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2604        hist = dstr+sep
2605        hist += funcname+sep#cdate+sep
2606        if parameters.has_key('self'): del parameters['self']
2607        for k, v in parameters.iteritems():
2608            if type(v) is dict:
2609                for k2, v2 in v.iteritems():
2610                    hist += k2
2611                    hist += "="
2612                    if isinstance(v2, scantable):
2613                        hist += 'scantable'
2614                    elif k2 == 'mask':
2615                        if isinstance(v2, list) or isinstance(v2, tuple):
2616                            hist += str(self._zip_mask(v2))
2617                        else:
2618                            hist += str(v2)
2619                    else:
2620                        hist += str(v2)
2621            else:
2622                hist += k
2623                hist += "="
2624                if isinstance(v, scantable):
2625                    hist += 'scantable'
2626                elif k == 'mask':
2627                    if isinstance(v, list) or isinstance(v, tuple):
2628                        hist += str(self._zip_mask(v))
2629                    else:
2630                        hist += str(v)
2631                else:
2632                    hist += str(v)
2633            hist += sep
2634        hist = hist[:-2] # remove trailing '##'
2635        self._addhistory(hist)
2636
2637
2638    def _zip_mask(self, mask):
2639        mask = list(mask)
2640        i = 0
2641        segments = []
2642        while mask[i:].count(1):
2643            i += mask[i:].index(1)
2644            if mask[i:].count(0):
2645                j = i + mask[i:].index(0)
2646            else:
2647                j = len(mask)
2648            segments.append([i, j])
2649            i = j
2650        return segments
2651
2652    def _get_ordinate_label(self):
2653        fu = "("+self.get_fluxunit()+")"
2654        import re
2655        lbl = "Intensity"
2656        if re.match(".K.", fu):
2657            lbl = "Brightness Temperature "+ fu
2658        elif re.match(".Jy.", fu):
2659            lbl = "Flux density "+ fu
2660        return lbl
2661
2662    def _check_ifs(self):
2663        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2664        nchans = filter(lambda t: t > 0, nchans)
2665        return (sum(nchans)/len(nchans) == nchans[0])
2666
    @asaplog_post_dec
    def _fill(self, names, unit, average, getpt, antenna):
        """\
        Import the given datasets into this scantable (in place).

        Parameters:

            names:      list of file/directory names to import; '~' and
                        environment variables are expanded, and a
                        missing path raises IOError
            unit:       flux/brightness unit to set after filling, or
                        None to leave it unchanged
            average:    if True, time-average the integrations of each
                        dataset ('SCAN' averaging) while filling
            getpt:      filler 'getpt' option (MS import)
            antenna:    filler 'antenna' option (MS import)

        """
        first = True
        fullnames = []
        # validate and normalise every path before touching any data
        for name in names:
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage backend flag: 1 = disk-based, 0 = memory-based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = filler(tbl)
            rx = rcParams['scantable.reference']
            r.setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
            r.open(name, opts)# antenna, -1, -1, getpt)
            r.fill()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # merge what was read so far (self) with the new table
                tbl = self._math._merge([self, tbl])
            # re-initialise self with the (possibly merged) table
            Scantable.__init__(self, tbl)
            r.close()
            del r, tbl
            first = False
            #flush log
        asaplog.post()
        if unit is not None:
            self.set_fluxunit(unit)
        if not is_casapy():
            self.set_freqframe(rcParams['scantable.freqframe'])
2705
2706    def __getitem__(self, key):
2707        if key < 0:
2708            key += self.nrow()
2709        if key >= self.nrow():
2710            raise IndexError("Row index out of range.")
2711        return self._getspectrum(key)
2712
2713    def __setitem__(self, key, value):
2714        if key < 0:
2715            key += self.nrow()
2716        if key >= self.nrow():
2717            raise IndexError("Row index out of range.")
2718        if not hasattr(value, "__len__") or \
2719                len(value) > self.nchan(self.getif(key)):
2720            raise ValueError("Spectrum length doesn't match.")
2721        return self._setspectrum(value, key)
2722
2723    def __len__(self):
2724        return self.nrow()
2725
2726    def __iter__(self):
2727        for i in range(len(self)):
2728            yield self[i]
Note: See TracBrowser for help on using the repository browser.