source: trunk/python/scantable.py @ 1909

Last change on this file was revision 1909, checked in by WataruKawasaki, 14 years ago

New Development: No

JIRA Issue: Yes CAS-1937

Ready to Release: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sdbaseline

Description: Removing an obsolete function print_log().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
def preserve_selection(func):
    """Decorator that restores a scantable's selection after *func* runs.

    The object's current selection is captured before the call and
    re-applied afterwards, even when the wrapped call raises.
    """
    @wraps_dec(func)
    def wrapper(obj, *args, **kwargs):
        saved = obj.get_selection()
        try:
            result = func(obj, *args, **kwargs)
        finally:
            obj.set_selection(saved)
        return result
    return wrapper
31
def is_scantable(filename):
    """Is the given file a scantable?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A scantable is a casacore table directory; it is distinguished from
    # a MeasurementSet by the type string on the first line of table.info.
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        first_line = f.readline()
    finally:
        # previously the handle leaked if readline() raised
        f.close()
    # Any casacore table that is not a Measurement Set is treated as a
    # scantable (the stricter check for 'Scantable' was disabled upstream).
    return first_line.find('Measurement Set') == -1
56
def is_ms(filename):
    """Is the given file a MeasurementSet?

    Parameters:

        filename: the name of the file/directory to test

    """
    # A MeasurementSet is a casacore table directory whose table.info
    # declares the type 'Measurement Set' on its first line.
    if not (os.path.isdir(filename)
            and os.path.exists(filename + '/table.info')
            and os.path.exists(filename + '/table.dat')):
        return False
    f = open(filename + '/table.info')
    try:
        first_line = f.readline()
    finally:
        # previously the handle leaked if readline() raised
        f.close()
    return first_line.find('Measurement Set') != -1
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
    @asaplog_post_dec
    def __init__(self, filename, average=None, unit=None, getpt=None,
                 antenna=None, parallactify=None):
        """\
        Create a scantable from a saved one or make a reference

        Parameters:

            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read) or
                          [advanced] a reference to an existing scantable

            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.

            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the filler
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables

            getpt:        for MeasurementSet input data only:
                          If True (the default), all pointing data are
                          filled. Setting this False can make loading the
                          MS data faster in some cases.

            antenna:      Antenna selection. integer (id) or string (name
                          or id). Currently unsupported: a warning is
                          logged and the value is normalised to a string.

            parallactify: Indicate that the data had been parallactified.
                          Default is taken from rc file.

        """
        # Resolve unset arguments from the rc configuration / defaults.
        if average is None:
            average = rcParams['scantable.autoaverage']
        if getpt is None:
            getpt = True
        # Antenna selection is accepted for interface compatibility only.
        if antenna is not None:
            asaplog.push("Antenna selection currently unsupported."
                         "Using ''")
            asaplog.post('WARN')
        # Normalise 'antenna' to a comma-separated string of ids/names.
        if antenna is None:
            antenna = ''
        elif type(antenna) == int:
            antenna = '%s' % antenna
        elif type(antenna) == list:
            tmpstr = ''
            for i in range( len(antenna) ):
                if type(antenna[i]) == int:
                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
                elif type(antenna[i]) == str:
                    tmpstr=tmpstr+antenna[i]+','
                else:
                    raise TypeError('Bad antenna selection.')
            antenna = tmpstr.rstrip(',')
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()  # snapshot of the arguments for the history entry
        from asap._asap import stmath
        self._math = stmath( rcParams['insitu'] )
        if isinstance(filename, Scantable):
            # Reference construction from an existing low-level table.
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    raise IOError(s)
                if is_scantable(filename):
                    # Existing scantable: open directly, optionally on disk.
                    ondisk = rcParams['scantable.storage'] == 'disk'
                    Scantable.__init__(self, filename, ondisk)
                    if unit is not None:
                        self.set_fluxunit(unit)
                    # do not reset to the default freqframe
                    #self.set_freqframe(rcParams['scantable.freqframe'])
                #elif os.path.isdir(filename) \
                #         and not os.path.exists(filename+'/table.f1'):
                elif is_ms(filename):
                    # MeasurementSet: run it through the filler.
                    self._fill([filename], unit, average, getpt, antenna)
                elif os.path.isfile(filename):
                    # Plain file (rpfits/sdfits): also handled by the filler.
                    self._fill([filename], unit, average, getpt, antenna)
                else:
                    msg = "The given file '%s'is not a valid " \
                          "asap table." % (filename)
                    raise IOError(msg)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # A sequence of file names is filled in one go.
                self._fill(filename, unit, average, getpt, antenna)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
174
175    @asaplog_post_dec
176    def save(self, name=None, format=None, overwrite=False):
177        """\
178        Store the scantable on disk. This can be an asap (aips++) Table,
179        SDFITS or MS2 format.
180
181        Parameters:
182
183            name:        the name of the outputfile. For format "ASCII"
184                         this is the root file name (data in 'name'.txt
185                         and header in 'name'_header.txt)
186
187            format:      an optional file format. Default is ASAP.
188                         Allowed are:
189
190                            * 'ASAP' (save as ASAP [aips++] Table),
191                            * 'SDFITS' (save as SDFITS file)
192                            * 'ASCII' (saves as ascii text file)
193                            * 'MS2' (saves as an casacore MeasurementSet V2)
194                            * 'FITS' (save as image FITS - not readable by class)
195                            * 'CLASS' (save as FITS readable by CLASS)
196
197            overwrite:   If the file should be overwritten if it exists.
198                         The default False is to return with warning
199                         without writing the output. USE WITH CARE.
200
201        Example::
202
203            scan.save('myscan.asap')
204            scan.save('myscan.sdfits', 'SDFITS')
205
206        """
207        from os import path
208        format = format or rcParams['scantable.save']
209        suffix = '.'+format.lower()
210        if name is None or name == "":
211            name = 'scantable'+suffix
212            msg = "No filename given. Using default name %s..." % name
213            asaplog.push(msg)
214        name = path.expandvars(name)
215        if path.isfile(name) or path.isdir(name):
216            if not overwrite:
217                msg = "File %s exists." % name
218                raise IOError(msg)
219        format2 = format.upper()
220        if format2 == 'ASAP':
221            self._save(name)
222        else:
223            from asap._asap import stwriter as stw
224            writer = stw(format2)
225            writer.write(self, name)
226        return
227
228    def copy(self):
229        """Return a copy of this scantable.
230
231        *Note*:
232
233            This makes a full (deep) copy. scan2 = scan1 makes a reference.
234
235        Example::
236
237            copiedscan = scan.copy()
238
239        """
240        sd = scantable(Scantable._copy(self))
241        return sd
242
243    def drop_scan(self, scanid=None):
244        """\
245        Return a new scantable where the specified scan number(s) has(have)
246        been dropped.
247
248        Parameters:
249
250            scanid:    a (list of) scan number(s)
251
252        """
253        from asap import _is_sequence_or_number as _is_valid
254        from asap import _to_list
255        from asap import unique
256        if not _is_valid(scanid):
257            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
258        scanid = _to_list(scanid)
259        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
260        for sid in scanid: allscans.remove(sid)
261        if len(allscans) == 0:
262            raise ValueError("Can't remove all scans")
263        sel = selector(scans=allscans)
264        return self._select_copy(sel)
265
266    def _select_copy(self, selection):
267        orig = self.get_selection()
268        self.set_selection(orig+selection)
269        cp = self.copy()
270        self.set_selection(orig)
271        return cp
272
273    def get_scan(self, scanid=None):
274        """\
275        Return a specific scan (by scanno) or collection of scans (by
276        source name) in a new scantable.
277
278        *Note*:
279
280            See scantable.drop_scan() for the inverse operation.
281
282        Parameters:
283
284            scanid:    a (list of) scanno or a source name, unix-style
285                       patterns are accepted for source name matching, e.g.
286                       '*_R' gets all 'ref scans
287
288        Example::
289
290            # get all scans containing the source '323p459'
291            newscan = scan.get_scan('323p459')
292            # get all 'off' scans
293            refscans = scan.get_scan('*_R')
294            # get a susbset of scans by scanno (as listed in scan.summary())
295            newscan = scan.get_scan([0, 2, 7, 10])
296
297        """
298        if scanid is None:
299            raise RuntimeError( 'Please specify a scan no or name to '
300                                'retrieve from the scantable' )
301        try:
302            bsel = self.get_selection()
303            sel = selector()
304            if type(scanid) is str:
305                sel.set_name(scanid)
306                return self._select_copy(sel)
307            elif type(scanid) is int:
308                sel.set_scans([scanid])
309                return self._select_copy(sel)
310            elif type(scanid) is list:
311                sel.set_scans(scanid)
312                return self._select_copy(sel)
313            else:
314                msg = "Illegal scanid type, use 'int' or 'list' if ints."
315                raise TypeError(msg)
316        except RuntimeError:
317            raise
318
    def __str__(self):
        # Verbose summary text from the C++ layer (same text summary() uses).
        return Scantable._summary(self, True)
321
322    def summary(self, filename=None):
323        """\
324        Print a summary of the contents of this scantable.
325
326        Parameters:
327
328            filename:    the name of a file to write the putput to
329                         Default - no file output
330
331        """
332        info = Scantable._summary(self, True)
333        if filename is not None:
334            if filename is "":
335                filename = 'scantable_summary.txt'
336            from os.path import expandvars, isdir
337            filename = expandvars(filename)
338            if not isdir(filename):
339                data = open(filename, 'w')
340                data.write(info)
341                data.close()
342            else:
343                msg = "Illegal file name '%s'." % (filename)
344                raise IOError(msg)
345        return page(info)
346
347    def get_spectrum(self, rowno):
348        """Return the spectrum for the current row in the scantable as a list.
349
350        Parameters:
351
352             rowno:   the row number to retrieve the spectrum from
353
354        """
355        return self._getspectrum(rowno)
356
357    def get_mask(self, rowno):
358        """Return the mask for the current row in the scantable as a list.
359
360        Parameters:
361
362             rowno:   the row number to retrieve the mask from
363
364        """
365        return self._getmask(rowno)
366
367    def set_spectrum(self, spec, rowno):
368        """Return the spectrum for the current row in the scantable as a list.
369
370        Parameters:
371
372             spec:   the new spectrum
373
374             rowno:  the row number to set the spectrum for
375
376        """
377        assert(len(spec) == self.nchan())
378        return self._setspectrum(spec, rowno)
379
380    def get_coordinate(self, rowno):
381        """Return the (spectral) coordinate for a a given 'rowno'.
382
383        *Note*:
384
385            * This coordinate is only valid until a scantable method modifies
386              the frequency axis.
387            * This coordinate does contain the original frequency set-up
388              NOT the new frame. The conversions however are done using the user
389              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
390              use scantable.freq_align first. Without it there is no closure,
391              i.e.::
392
393                  c = myscan.get_coordinate(0)
394                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
395
396        Parameters:
397
398             rowno:    the row number for the spectral coordinate
399
400        """
401        return coordinate(Scantable.get_coordinate(self, rowno))
402
403    def get_selection(self):
404        """\
405        Get the selection object currently set on this scantable.
406
407        Example::
408
409            sel = scan.get_selection()
410            sel.set_ifs(0)              # select IF 0
411            scan.set_selection(sel)     # apply modified selection
412
413        """
414        return selector(self._getselection())
415
416    def set_selection(self, selection=None, **kw):
417        """\
418        Select a subset of the data. All following operations on this scantable
419        are only applied to thi selection.
420
421        Parameters:
422
423            selection:    a selector object (default unset the selection), or
424                          any combination of "pols", "ifs", "beams", "scans",
425                          "cycles", "name", "query"
426
427        Examples::
428
429            sel = selector()         # create a selection object
430            self.set_scans([0, 3])    # select SCANNO 0 and 3
431            scan.set_selection(sel)  # set the selection
432            scan.summary()           # will only print summary of scanno 0 an 3
433            scan.set_selection()     # unset the selection
434            # or the equivalent
435            scan.set_selection(scans=[0,3])
436            scan.summary()           # will only print summary of scanno 0 an 3
437            scan.set_selection()     # unset the selection
438
439        """
440        if selection is None:
441            # reset
442            if len(kw) == 0:
443                selection = selector()
444            else:
445                # try keywords
446                for k in kw:
447                    if k not in selector.fields:
448                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
449                selection = selector(**kw)
450        self._setselection(selection)
451
452    def get_row(self, row=0, insitu=None):
453        """\
454        Select a row in the scantable.
455        Return a scantable with single row.
456
457        Parameters:
458
459            row:    row no of integration, default is 0.
460            insitu: if False a new scantable is returned. Otherwise, the
461                    scaling is done in-situ. The default is taken from .asaprc
462                    (False)
463
464        """
465        if insitu is None: insitu = rcParams['insitu']
466        if not insitu:
467            workscan = self.copy()
468        else:
469            workscan = self
470        # Select a row
471        sel=selector()
472        sel.set_scans([workscan.getscan(row)])
473        sel.set_cycles([workscan.getcycle(row)])
474        sel.set_beams([workscan.getbeam(row)])
475        sel.set_ifs([workscan.getif(row)])
476        sel.set_polarisations([workscan.getpol(row)])
477        sel.set_name(workscan._getsourcename(row))
478        workscan.set_selection(sel)
479        if not workscan.nrow() == 1:
480            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
481            raise RuntimeError(msg)
482        del sel
483        if insitu:
484            self._assign(workscan)
485        else:
486            return workscan
487
    @asaplog_post_dec
    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
        """\
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.

        Parameters:

            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'

            mask:    an optional mask specifying where the statistic
                     should be determined.

            form:    format string to print statistic values

            row:     row number of spectrum to process.
                     (default is None: for all rows)

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=m)

        """
        mask = mask or []
        # Mixed channel counts across IFs would make a single mask invalid.
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use setselection() "
                             "to select individual IFs")
        # '*_abc' variants return the abscissa (channel position) of the
        # extremum instead of the statistic value itself.
        rtnabc = False
        if stat.lower().endswith('_abc'): rtnabc = True
        # min/max additionally need the channel of the extremum per row.
        getchan = False
        if stat.lower().startswith('min') or stat.lower().startswith('max'):
            chan = self._math._minmaxchan(self, mask, stat)
            getchan = True
            statvals = []
        if not rtnabc:
            # Compute the statistic for all rows or a single row in C++.
            if row == None:
                statvals = self._math._stats(self, mask, stat)
            else:
                statvals = self._math._statsrow(self, mask, stat, int(row))

        #def cb(i):
        #    return statvals[i]

        #return self._row_callback(cb, stat)

        label=stat
        #callback=cb
        out = ""
        #outvec = []
        sep = '-'*50

        # Rows to report on: all of them, or just the requested one.
        if row == None:
            rows = xrange(self.nrow())
        elif isinstance(row, int):
            rows = [ row ]

        for i in rows:
            refstr = ''
            statunit= ''
            # NOTE(review): statunit is read after the loop; if the table
            # has no rows this would raise NameError -- confirm whether an
            # empty scantable can reach this point.
            if getchan:
                qx, qy = self.chan2data(rowno=i, chan=chan[i])
                if rtnabc:
                    # report the channel position; the value is shown as a
                    # reference in parentheses
                    statvals.append(qx['value'])
                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
                    statunit= '['+qx['unit']+']'
                else:
                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'

            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # Only mention beam/IF/pol when more than one is present.
            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
            #outvec.append(callback(i))
            if len(rows) > 1:
                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
            else:
                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
            out +=  sep+"\n"

        # Route the report through a temporary file so the log gets the
        # exact printed formatting.
        # NOTE(review): os.environ.has_key and the commands module are
        # Python-2-only; the fixed /tmp path is also predictable and shared
        # per user -- consider tempfile if this is ever revisited.
        import os
        if os.environ.has_key( 'USER' ):
            usr = os.environ['USER']
        else:
            import commands
            usr = commands.getoutput( 'whoami' )
        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
        f = open(tmpfile,'w')
        print >> f, sep
        print >> f, ' %s %s' % (label, statunit)
        print >> f, sep
        print >> f, out
        f.close()
        f = open(tmpfile,'r')
        x = f.readlines()
        f.close()
        asaplog.push(''.join(x), False)

        return statvals
595
596    def chan2data(self, rowno=0, chan=0):
597        """\
598        Returns channel/frequency/velocity and spectral value
599        at an arbitrary row and channel in the scantable.
600
601        Parameters:
602
603            rowno:   a row number in the scantable. Default is the
604                     first row, i.e. rowno=0
605
606            chan:    a channel in the scantable. Default is the first
607                     channel, i.e. pos=0
608
609        """
610        if isinstance(rowno, int) and isinstance(chan, int):
611            qx = {'unit': self.get_unit(),
612                  'value': self._getabcissa(rowno)[chan]}
613            qy = {'unit': self.get_fluxunit(),
614                  'value': self._getspectrum(rowno)[chan]}
615            return qx, qy
616
617    def stddev(self, mask=None):
618        """\
619        Determine the standard deviation of the current beam/if/pol
620        Takes a 'mask' as an optional parameter to specify which
621        channels should be excluded.
622
623        Parameters:
624
625            mask:    an optional mask specifying where the standard
626                     deviation should be determined.
627
628        Example::
629
630            scan.set_unit('channel')
631            msk = scan.create_mask([100, 200], [500, 600])
632            scan.stddev(mask=m)
633
634        """
635        return self.stats(stat='stddev', mask=mask);
636
637
638    def get_column_names(self):
639        """\
640        Return a  list of column names, which can be used for selection.
641        """
642        return list(Scantable.get_column_names(self))
643
644    def get_tsys(self, row=-1):
645        """\
646        Return the System temperatures.
647
648        Parameters:
649
650            row:    the rowno to get the information for. (default all rows)
651
652        Returns:
653
654            a list of Tsys values for the current selection
655
656        """
657        if row > -1:
658            return self._get_column(self._gettsys, row)
659        return self._row_callback(self._gettsys, "Tsys")
660
661
662    def get_weather(self, row=-1):
663        """\
664        Return the weather informations.
665
666        Parameters:
667
668            row:    the rowno to get the information for. (default all rows)
669
670        Returns:
671
672            a dict or list of of dicts of values for the current selection
673
674        """
675
676        values = self._get_column(self._get_weather, row)
677        if row > -1:
678            return {'temperature': values[0],
679                    'pressure': values[1], 'humidity' : values[2],
680                    'windspeed' : values[3], 'windaz' : values[4]
681                    }
682        else:
683            out = []
684            for r in values:
685
686                out.append({'temperature': r[0],
687                            'pressure': r[1], 'humidity' : r[2],
688                            'windspeed' : r[3], 'windaz' : r[4]
689                    })
690            return out
691
692    def _row_callback(self, callback, label):
693        out = ""
694        outvec = []
695        sep = '-'*50
696        for i in range(self.nrow()):
697            tm = self._gettime(i)
698            src = self._getsourcename(i)
699            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
700            out += 'Time[%s]:\n' % (tm)
701            if self.nbeam(-1) > 1:
702                out +=  ' Beam[%d] ' % (self.getbeam(i))
703            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
704            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
705            outvec.append(callback(i))
706            out += '= %3.3f\n' % (outvec[i])
707            out +=  sep+'\n'
708
709        asaplog.push(sep)
710        asaplog.push(" %s" % (label))
711        asaplog.push(sep)
712        asaplog.push(out)
713        asaplog.post()
714        return outvec
715
716    def _get_column(self, callback, row=-1):
717        """
718        """
719        if row == -1:
720            return [callback(i) for i in range(self.nrow())]
721        else:
722            if  0 <= row < self.nrow():
723                return callback(row)
724
725
726    def get_time(self, row=-1, asdatetime=False):
727        """\
728        Get a list of time stamps for the observations.
729        Return a datetime object for each integration time stamp in the scantable.
730
731        Parameters:
732
733            row:          row no of integration. Default -1 return all rows
734
735            asdatetime:   return values as datetime objects rather than strings
736
737        """
738        from time import strptime
739        from datetime import datetime
740        times = self._get_column(self._gettime, row)
741        if not asdatetime:
742            return times
743        format = "%Y/%m/%d/%H:%M:%S"
744        if isinstance(times, list):
745            return [datetime(*strptime(i, format)[:6]) for i in times]
746        else:
747            return datetime(*strptime(times, format)[:6])
748
749
750    def get_inttime(self, row=-1):
751        """\
752        Get a list of integration times for the observations.
753        Return a time in seconds for each integration in the scantable.
754
755        Parameters:
756
757            row:    row no of integration. Default -1 return all rows.
758
759        """
760        return self._get_column(self._getinttime, row)
761
762
763    def get_sourcename(self, row=-1):
764        """\
765        Get a list source names for the observations.
766        Return a string for each integration in the scantable.
767        Parameters:
768
769            row:    row no of integration. Default -1 return all rows.
770
771        """
772        return self._get_column(self._getsourcename, row)
773
774    def get_elevation(self, row=-1):
775        """\
776        Get a list of elevations for the observations.
777        Return a float for each integration in the scantable.
778
779        Parameters:
780
781            row:    row no of integration. Default -1 return all rows.
782
783        """
784        return self._get_column(self._getelevation, row)
785
786    def get_azimuth(self, row=-1):
787        """\
788        Get a list of azimuths for the observations.
789        Return a float for each integration in the scantable.
790
791        Parameters:
792            row:    row no of integration. Default -1 return all rows.
793
794        """
795        return self._get_column(self._getazimuth, row)
796
797    def get_parangle(self, row=-1):
798        """\
799        Get a list of parallactic angles for the observations.
800        Return a float for each integration in the scantable.
801
802        Parameters:
803
804            row:    row no of integration. Default -1 return all rows.
805
806        """
807        return self._get_column(self._getparangle, row)
808
809    def get_direction(self, row=-1):
810        """
811        Get a list of Positions on the sky (direction) for the observations.
812        Return a string for each integration in the scantable.
813
814        Parameters:
815
816            row:    row no of integration. Default -1 return all rows
817
818        """
819        return self._get_column(self._getdirection, row)
820
821    def get_directionval(self, row=-1):
822        """\
823        Get a list of Positions on the sky (direction) for the observations.
824        Return a float for each integration in the scantable.
825
826        Parameters:
827
828            row:    row no of integration. Default -1 return all rows
829
830        """
831        return self._get_column(self._getdirectionvec, row)
832
833    @asaplog_post_dec
834    def set_unit(self, unit='channel'):
835        """\
836        Set the unit for all following operations on this scantable
837
838        Parameters:
839
840            unit:    optional unit, default is 'channel'. Use one of '*Hz',
841                     'km/s', 'channel' or equivalent ''
842
843        """
844        varlist = vars()
845        if unit in ['', 'pixel', 'channel']:
846            unit = ''
847        inf = list(self._getcoordinfo())
848        inf[0] = unit
849        self._setcoordinfo(inf)
850        self._add_history("set_unit", varlist)
851
852    @asaplog_post_dec
853    def set_instrument(self, instr):
854        """\
855        Set the instrument for subsequent processing.
856
857        Parameters:
858
859            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
860                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
861
862        """
863        self._setInstrument(instr)
864        self._add_history("set_instument", vars())
865
866    @asaplog_post_dec
867    def set_feedtype(self, feedtype):
868        """\
869        Overwrite the feed type, which might not be set correctly.
870
871        Parameters:
872
873            feedtype:     'linear' or 'circular'
874
875        """
876        self._setfeedtype(feedtype)
877        self._add_history("set_feedtype", vars())
878
879    @asaplog_post_dec
880    def set_doppler(self, doppler='RADIO'):
881        """\
882        Set the doppler for all following operations on this scantable.
883
884        Parameters:
885
886            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
887
888        """
889        varlist = vars()
890        inf = list(self._getcoordinfo())
891        inf[2] = doppler
892        self._setcoordinfo(inf)
893        self._add_history("set_doppler", vars())
894
895    @asaplog_post_dec
896    def set_freqframe(self, frame=None):
897        """\
898        Set the frame type of the Spectral Axis.
899
900        Parameters:
901
902            frame:   an optional frame type, default 'LSRK'. Valid frames are:
903                     'TOPO', 'LSRD', 'LSRK', 'BARY',
904                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
905
906        Example::
907
908            scan.set_freqframe('BARY')
909
910        """
911        frame = frame or rcParams['scantable.freqframe']
912        varlist = vars()
913        # "REST" is not implemented in casacore
914        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
915        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
916        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
917                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
918
919        if frame in valid:
920            inf = list(self._getcoordinfo())
921            inf[1] = frame
922            self._setcoordinfo(inf)
923            self._add_history("set_freqframe", varlist)
924        else:
925            msg  = "Please specify a valid freq type. Valid types are:\n", valid
926            raise TypeError(msg)
927
    @asaplog_post_dec
    def set_dirframe(self, frame=""):
        """\
        Set the frame type of the Direction on the sky.

        Parameters:

            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'

        Example:

            scan.set_dirframe('GALACTIC')

        """
        # capture the call arguments for the history record before the work
        varlist = vars()
        # delegate validation and setting to the C++ base-class method
        Scantable.set_dirframe(self, frame)
        self._add_history("set_dirframe", varlist)
946
947    def get_unit(self):
948        """\
949        Get the default unit set in this scantable
950
951        Returns:
952
953            A unit string
954
955        """
956        inf = self._getcoordinfo()
957        unit = inf[0]
958        if unit == '': unit = 'channel'
959        return unit
960
961    @asaplog_post_dec
962    def get_abcissa(self, rowno=0):
963        """\
964        Get the abcissa in the current coordinate setup for the currently
965        selected Beam/IF/Pol
966
967        Parameters:
968
969            rowno:    an optional row number in the scantable. Default is the
970                      first row, i.e. rowno=0
971
972        Returns:
973
974            The abcissa values and the format string (as a dictionary)
975
976        """
977        abc = self._getabcissa(rowno)
978        lbl = self._getabcissalabel(rowno)
979        return abc, lbl
980
981    @asaplog_post_dec
982    def flag(self, mask=None, unflag=False):
983        """\
984        Flag the selected data using an optional channel mask.
985
986        Parameters:
987
988            mask:   an optional channel mask, created with create_mask. Default
989                    (no mask) is all channels.
990
991            unflag:    if True, unflag the data
992
993        """
994        varlist = vars()
995        mask = mask or []
996        self._flag(mask, unflag)
997        self._add_history("flag", varlist)
998
999    @asaplog_post_dec
1000    def flag_row(self, rows=[], unflag=False):
1001        """\
1002        Flag the selected data in row-based manner.
1003
1004        Parameters:
1005
1006            rows:   list of row numbers to be flagged. Default is no row
1007                    (must be explicitly specified to execute row-based flagging).
1008
1009            unflag: if True, unflag the data.
1010
1011        """
1012        varlist = vars()
1013        self._flag_row(rows, unflag)
1014        self._add_history("flag_row", varlist)
1015
    @asaplog_post_dec
    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
        """\
        Flag the selected data outside a specified range (in channel-base)

        Parameters:

            uthres:      upper threshold.

            dthres:      lower threshold

            clipoutside: True for flagging data outside the range [dthres:uthres].
                         False for flagging data inside the range.

            unflag:      if True, unflag the data.

        """
        # record the call arguments before delegating to the C++ layer
        varlist = vars()
        self._clip(uthres, dthres, clipoutside, unflag)
        self._add_history("clip", varlist)
1036
1037    @asaplog_post_dec
1038    def lag_flag(self, start, end, unit="MHz", insitu=None):
1039        """\
1040        Flag the data in 'lag' space by providing a frequency to remove.
1041        Flagged data in the scantable gets interpolated over the region.
1042        No taper is applied.
1043
1044        Parameters:
1045
1046            start:    the start frequency (really a period within the
1047                      bandwidth)  or period to remove
1048
1049            end:      the end frequency or period to remove
1050
1051            unit:     the frequency unit (default "MHz") or "" for
1052                      explicit lag channels
1053
1054        *Notes*:
1055
1056            It is recommended to flag edges of the band or strong
1057            signals beforehand.
1058
1059        """
1060        if insitu is None: insitu = rcParams['insitu']
1061        self._math._setinsitu(insitu)
1062        varlist = vars()
1063        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1064        if not (unit == "" or base.has_key(unit)):
1065            raise ValueError("%s is not a valid unit." % unit)
1066        if unit == "":
1067            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1068        else:
1069            s = scantable(self._math._lag_flag(self, start*base[unit],
1070                                               end*base[unit], "frequency"))
1071        s._add_history("lag_flag", varlist)
1072        if insitu:
1073            self._assign(s)
1074        else:
1075            return s
1076
1077    @asaplog_post_dec
1078    def create_mask(self, *args, **kwargs):
1079        """\
1080        Compute and return a mask based on [min, max] windows.
1081        The specified windows are to be INCLUDED, when the mask is
1082        applied.
1083
1084        Parameters:
1085
1086            [min, max], [min2, max2], ...
1087                Pairs of start/end points (inclusive)specifying the regions
1088                to be masked
1089
1090            invert:     optional argument. If specified as True,
1091                        return an inverted mask, i.e. the regions
1092                        specified are EXCLUDED
1093
1094            row:        create the mask using the specified row for
1095                        unit conversions, default is row=0
1096                        only necessary if frequency varies over rows.
1097
1098        Examples::
1099
1100            scan.set_unit('channel')
1101            # a)
1102            msk = scan.create_mask([400, 500], [800, 900])
1103            # masks everything outside 400 and 500
1104            # and 800 and 900 in the unit 'channel'
1105
1106            # b)
1107            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1108            # masks the regions between 400 and 500
1109            # and 800 and 900 in the unit 'channel'
1110
1111            # c)
1112            #mask only channel 400
1113            msk =  scan.create_mask([400])
1114
1115        """
1116        row = kwargs.get("row", 0)
1117        data = self._getabcissa(row)
1118        u = self._getcoordinfo()[0]
1119        if u == "":
1120            u = "channel"
1121        msg = "The current mask window unit is %s" % u
1122        i = self._check_ifs()
1123        if not i:
1124            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1125        asaplog.push(msg)
1126        n = self.nchan()
1127        msk = _n_bools(n, False)
1128        # test if args is a 'list' or a 'normal *args - UGLY!!!
1129
1130        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1131             and args or args[0]
1132        for window in ws:
1133            if len(window) == 1:
1134                window = [window[0], window[0]]
1135            if len(window) == 0 or  len(window) > 2:
1136                raise ValueError("A window needs to be defined as [start(, end)]")
1137            if window[0] > window[1]:
1138                tmp = window[0]
1139                window[0] = window[1]
1140                window[1] = tmp
1141            for i in range(n):
1142                if data[i] >= window[0] and data[i] <= window[1]:
1143                    msk[i] = True
1144        if kwargs.has_key('invert'):
1145            if kwargs.get('invert'):
1146                msk = mask_not(msk)
1147        return msk
1148
1149    def get_masklist(self, mask=None, row=0):
1150        """\
1151        Compute and return a list of mask windows, [min, max].
1152
1153        Parameters:
1154
1155            mask:       channel mask, created with create_mask.
1156
1157            row:        calcutate the masklist using the specified row
1158                        for unit conversions, default is row=0
1159                        only necessary if frequency varies over rows.
1160
1161        Returns:
1162
1163            [min, max], [min2, max2], ...
1164                Pairs of start/end points (inclusive)specifying
1165                the masked regions
1166
1167        """
1168        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1169            raise TypeError("The mask should be list or tuple.")
1170        if len(mask) < 2:
1171            raise TypeError("The mask elements should be > 1")
1172        if self.nchan() != len(mask):
1173            msg = "Number of channels in scantable != number of mask elements"
1174            raise TypeError(msg)
1175        data = self._getabcissa(row)
1176        u = self._getcoordinfo()[0]
1177        if u == "":
1178            u = "channel"
1179        msg = "The current mask window unit is %s" % u
1180        i = self._check_ifs()
1181        if not i:
1182            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1183        asaplog.push(msg)
1184        masklist=[]
1185        ist, ien = None, None
1186        ist, ien=self.get_mask_indices(mask)
1187        if ist is not None and ien is not None:
1188            for i in xrange(len(ist)):
1189                range=[data[ist[i]],data[ien[i]]]
1190                range.sort()
1191                masklist.append([range[0],range[1]])
1192        return masklist
1193
1194    def get_mask_indices(self, mask=None):
1195        """\
1196        Compute and Return lists of mask start indices and mask end indices.
1197
1198        Parameters:
1199
1200            mask:       channel mask, created with create_mask.
1201
1202        Returns:
1203
1204            List of mask start indices and that of mask end indices,
1205            i.e., [istart1,istart2,....], [iend1,iend2,....].
1206
1207        """
1208        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1209            raise TypeError("The mask should be list or tuple.")
1210        if len(mask) < 2:
1211            raise TypeError("The mask elements should be > 1")
1212        istart=[]
1213        iend=[]
1214        if mask[0]: istart.append(0)
1215        for i in range(len(mask)-1):
1216            if not mask[i] and mask[i+1]:
1217                istart.append(i+1)
1218            elif mask[i] and not mask[i+1]:
1219                iend.append(i)
1220        if mask[len(mask)-1]: iend.append(len(mask)-1)
1221        if len(istart) != len(iend):
1222            raise RuntimeError("Numbers of mask start != mask end.")
1223        for i in range(len(istart)):
1224            if istart[i] > iend[i]:
1225                raise RuntimeError("Mask start index > mask end index")
1226                break
1227        return istart,iend
1228
1229#    def get_restfreqs(self):
1230#        """
1231#        Get the restfrequency(s) stored in this scantable.
1232#        The return value(s) are always of unit 'Hz'
1233#        Parameters:
1234#            none
1235#        Returns:
1236#            a list of doubles
1237#        """
1238#        return list(self._getrestfreqs())
1239
1240    def get_restfreqs(self, ids=None):
1241        """\
1242        Get the restfrequency(s) stored in this scantable.
1243        The return value(s) are always of unit 'Hz'
1244
1245        Parameters:
1246
1247            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1248                 be retrieved
1249
1250        Returns:
1251
1252            dictionary containing ids and a list of doubles for each id
1253
1254        """
1255        if ids is None:
1256            rfreqs={}
1257            idlist = self.getmolnos()
1258            for i in idlist:
1259                rfreqs[i]=list(self._getrestfreqs(i))
1260            return rfreqs
1261        else:
1262            if type(ids)==list or type(ids)==tuple:
1263                rfreqs={}
1264                for i in ids:
1265                    rfreqs[i]=list(self._getrestfreqs(i))
1266                return rfreqs
1267            else:
1268                return list(self._getrestfreqs(ids))
1269            #return list(self._getrestfreqs(ids))
1270
1271    def set_restfreqs(self, freqs=None, unit='Hz'):
1272        """\
1273        Set or replace the restfrequency specified and
1274        If the 'freqs' argument holds a scalar,
1275        then that rest frequency will be applied to all the selected
1276        data.  If the 'freqs' argument holds
1277        a vector, then it MUST be of equal or smaller length than
1278        the number of IFs (and the available restfrequencies will be
1279        replaced by this vector).  In this case, *all* data have
1280        the restfrequency set per IF according
1281        to the corresponding value you give in the 'freqs' vector.
1282        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1283        IF 1 gets restfreq 2e9.
1284
1285        You can also specify the frequencies via a linecatalog.
1286
1287        Parameters:
1288
1289            freqs:   list of rest frequency values or string idenitfiers
1290
1291            unit:    unit for rest frequency (default 'Hz')
1292
1293
1294        Example::
1295
1296            # set the given restfrequency for the all currently selected IFs
1297            scan.set_restfreqs(freqs=1.4e9)
1298            # set restfrequencies for the n IFs  (n > 1) in the order of the
1299            # list, i.e
1300            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1301            # len(list_of_restfreqs) == nIF
1302            # for nIF == 1 the following will set multiple restfrequency for
1303            # that IF
1304            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1305            # set multiple restfrequencies per IF. as a list of lists where
1306            # the outer list has nIF elements, the inner s arbitrary
1307            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1308
1309       *Note*:
1310
1311            To do more sophisticate Restfrequency setting, e.g. on a
1312            source and IF basis, use scantable.set_selection() before using
1313            this function::
1314
1315                # provided your scantable is called scan
1316                selection = selector()
1317                selection.set_name("ORION*")
1318                selection.set_ifs([1])
1319                scan.set_selection(selection)
1320                scan.set_restfreqs(freqs=86.6e9)
1321
1322        """
1323        varlist = vars()
1324        from asap import linecatalog
1325        # simple  value
1326        if isinstance(freqs, int) or isinstance(freqs, float):
1327            self._setrestfreqs([freqs], [""], unit)
1328        # list of values
1329        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1330            # list values are scalars
1331            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1332                if len(freqs) == 1:
1333                    self._setrestfreqs(freqs, [""], unit)
1334                else:
1335                    # allow the 'old' mode of setting mulitple IFs
1336                    sel = selector()
1337                    savesel = self._getselection()
1338                    iflist = self.getifnos()
1339                    if len(freqs)>len(iflist):
1340                        raise ValueError("number of elements in list of list "
1341                                         "exeeds the current IF selections")
1342                    iflist = self.getifnos()
1343                    for i, fval in enumerate(freqs):
1344                        sel.set_ifs(iflist[i])
1345                        self._setselection(sel)
1346                        self._setrestfreqs([fval], [""], unit)
1347                    self._setselection(savesel)
1348
1349            # list values are dict, {'value'=, 'name'=)
1350            elif isinstance(freqs[-1], dict):
1351                values = []
1352                names = []
1353                for d in freqs:
1354                    values.append(d["value"])
1355                    names.append(d["name"])
1356                self._setrestfreqs(values, names, unit)
1357            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1358                sel = selector()
1359                savesel = self._getselection()
1360                iflist = self.getifnos()
1361                if len(freqs)>len(iflist):
1362                    raise ValueError("number of elements in list of list exeeds"
1363                                     " the current IF selections")
1364                for i, fval in enumerate(freqs):
1365                    sel.set_ifs(iflist[i])
1366                    self._setselection(sel)
1367                    self._setrestfreqs(fval, [""], unit)
1368                self._setselection(savesel)
1369        # freqs are to be taken from a linecatalog
1370        elif isinstance(freqs, linecatalog):
1371            sel = selector()
1372            savesel = self._getselection()
1373            for i in xrange(freqs.nrow()):
1374                sel.set_ifs(iflist[i])
1375                self._setselection(sel)
1376                self._setrestfreqs([freqs.get_frequency(i)],
1377                                   [freqs.get_name(i)], "MHz")
1378                # ensure that we are not iterating past nIF
1379                if i == self.nif()-1: break
1380            self._setselection(savesel)
1381        else:
1382            return
1383        self._add_history("set_restfreqs", varlist)
1384
    def shift_refpix(self, delta):
        """\
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.

        Parameters:

            delta:   the amount to shift by

        *Note*:

            Be careful using this with broadband data.

        """
        # delegate to the C++ base-class implementation
        # NOTE(review): unlike the other mutators in this class, this does
        # not call self._add_history() - confirm whether that is intentional
        Scantable.shift_refpix(self, delta)
1400
1401    @asaplog_post_dec
1402    def history(self, filename=None):
1403        """\
1404        Print the history. Optionally to a file.
1405
1406        Parameters:
1407
1408            filename:    The name  of the file to save the history to.
1409
1410        """
1411        hist = list(self._gethistory())
1412        out = "-"*80
1413        for h in hist:
1414            if h.startswith("---"):
1415                out = "\n".join([out, h])
1416            else:
1417                items = h.split("##")
1418                date = items[0]
1419                func = items[1]
1420                items = items[2:]
1421                out += "\n"+date+"\n"
1422                out += "Function: %s\n  Parameters:" % (func)
1423                for i in items:
1424                    s = i.split("=")
1425                    out += "\n   %s = %s" % (s[0], s[1])
1426                out = "\n".join([out, "-"*80])
1427        if filename is not None:
1428            if filename is "":
1429                filename = 'scantable_history.txt'
1430            import os
1431            filename = os.path.expandvars(os.path.expanduser(filename))
1432            if not os.path.isdir(filename):
1433                data = open(filename, 'w')
1434                data.write(out)
1435                data.close()
1436            else:
1437                msg = "Illegal file name '%s'." % (filename)
1438                raise IOError(msg)
1439        return page(out)
1440    #
1441    # Maths business
1442    #
1443    @asaplog_post_dec
1444    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1445        """\
1446        Return the (time) weighted average of a scan.
1447
1448        *Note*:
1449
1450            in channels only - align if necessary
1451
1452        Parameters:
1453
1454            mask:     an optional mask (only used for 'var' and 'tsys'
1455                      weighting)
1456
1457            scanav:   True averages each scan separately
1458                      False (default) averages all scans together,
1459
1460            weight:   Weighting scheme.
1461                      'none'     (mean no weight)
1462                      'var'      (1/var(spec) weighted)
1463                      'tsys'     (1/Tsys**2 weighted)
1464                      'tint'     (integration time weighted)
1465                      'tintsys'  (Tint/Tsys**2)
1466                      'median'   ( median averaging)
1467                      The default is 'tint'
1468
1469            align:    align the spectra in velocity before averaging. It takes
1470                      the time of the first spectrum as reference time.
1471
1472        Example::
1473
1474            # time average the scantable without using a mask
1475            newscan = scan.average_time()
1476
1477        """
1478        varlist = vars()
1479        weight = weight or 'TINT'
1480        mask = mask or ()
1481        scanav = (scanav and 'SCAN') or 'NONE'
1482        scan = (self, )
1483
1484        if align:
1485            scan = (self.freq_align(insitu=False), )
1486        s = None
1487        if weight.upper() == 'MEDIAN':
1488            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1489                                                     scanav))
1490        else:
1491            s = scantable(self._math._average(scan, mask, weight.upper(),
1492                          scanav))
1493        s._add_history("average_time", varlist)
1494        return s
1495
1496    @asaplog_post_dec
1497    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1498        """\
1499        Return a scan where all spectra are converted to either
1500        Jansky or Kelvin depending upon the flux units of the scan table.
1501        By default the function tries to look the values up internally.
1502        If it can't find them (or if you want to over-ride), you must
1503        specify EITHER jyperk OR eta (and D which it will try to look up
1504        also if you don't set it). jyperk takes precedence if you set both.
1505
1506        Parameters:
1507
1508            jyperk:      the Jy / K conversion factor
1509
1510            eta:         the aperture efficiency
1511
1512            d:           the geomtric diameter (metres)
1513
1514            insitu:      if False a new scantable is returned.
1515                         Otherwise, the scaling is done in-situ
1516                         The default is taken from .asaprc (False)
1517
1518        """
1519        if insitu is None: insitu = rcParams['insitu']
1520        self._math._setinsitu(insitu)
1521        varlist = vars()
1522        jyperk = jyperk or -1.0
1523        d = d or -1.0
1524        eta = eta or -1.0
1525        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1526        s._add_history("convert_flux", varlist)
1527        if insitu: self._assign(s)
1528        else: return s
1529
1530    @asaplog_post_dec
1531    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1532        """\
1533        Return a scan after applying a gain-elevation correction.
1534        The correction can be made via either a polynomial or a
1535        table-based interpolation (and extrapolation if necessary).
1536        You specify polynomial coefficients, an ascii table or neither.
1537        If you specify neither, then a polynomial correction will be made
1538        with built in coefficients known for certain telescopes (an error
1539        will occur if the instrument is not known).
1540        The data and Tsys are *divided* by the scaling factors.
1541
1542        Parameters:
1543
1544            poly:        Polynomial coefficients (default None) to compute a
1545                         gain-elevation correction as a function of
1546                         elevation (in degrees).
1547
1548            filename:    The name of an ascii file holding correction factors.
1549                         The first row of the ascii file must give the column
1550                         names and these MUST include columns
1551                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1552                         by this) somewhere.
1553                         The second row must give the data type of the
1554                         column. Use 'R' for Real and 'I' for Integer.
1555                         An example file would be
1556                         (actual factors are arbitrary) :
1557
1558                         TIME ELEVATION FACTOR
1559                         R R R
1560                         0.1 0 0.8
1561                         0.2 20 0.85
1562                         0.3 40 0.9
1563                         0.4 60 0.85
1564                         0.5 80 0.8
1565                         0.6 90 0.75
1566
1567            method:      Interpolation method when correcting from a table.
1568                         Values are  "nearest", "linear" (default), "cubic"
1569                         and "spline"
1570
1571            insitu:      if False a new scantable is returned.
1572                         Otherwise, the scaling is done in-situ
1573                         The default is taken from .asaprc (False)
1574
1575        """
1576
1577        if insitu is None: insitu = rcParams['insitu']
1578        self._math._setinsitu(insitu)
1579        varlist = vars()
1580        poly = poly or ()
1581        from os.path import expandvars
1582        filename = expandvars(filename)
1583        s = scantable(self._math._gainel(self, poly, filename, method))
1584        s._add_history("gain_el", varlist)
1585        if insitu:
1586            self._assign(s)
1587        else:
1588            return s
1589
1590    @asaplog_post_dec
1591    def freq_align(self, reftime=None, method='cubic', insitu=None):
1592        """\
1593        Return a scan where all rows have been aligned in frequency/velocity.
1594        The alignment frequency frame (e.g. LSRK) is that set by function
1595        set_freqframe.
1596
1597        Parameters:
1598
1599            reftime:     reference time to align at. By default, the time of
1600                         the first row of data is used.
1601
1602            method:      Interpolation method for regridding the spectra.
1603                         Choose from "nearest", "linear", "cubic" (default)
1604                         and "spline"
1605
1606            insitu:      if False a new scantable is returned.
1607                         Otherwise, the scaling is done in-situ
1608                         The default is taken from .asaprc (False)
1609
1610        """
1611        if insitu is None: insitu = rcParams["insitu"]
1612        self._math._setinsitu(insitu)
1613        varlist = vars()
1614        reftime = reftime or ""
1615        s = scantable(self._math._freq_align(self, reftime, method))
1616        s._add_history("freq_align", varlist)
1617        if insitu: self._assign(s)
1618        else: return s
1619
1620    @asaplog_post_dec
1621    def opacity(self, tau=None, insitu=None):
1622        """\
1623        Apply an opacity correction. The data
1624        and Tsys are multiplied by the correction factor.
1625
1626        Parameters:
1627
1628            tau:         (list of) opacity from which the correction factor is
1629                         exp(tau*ZD)
1630                         where ZD is the zenith-distance.
1631                         If a list is provided, it has to be of length nIF,
1632                         nIF*nPol or 1 and in order of IF/POL, e.g.
1633                         [opif0pol0, opif0pol1, opif1pol0 ...]
1634                         if tau is `None` the opacities are determined from a
1635                         model.
1636
1637            insitu:      if False a new scantable is returned.
1638                         Otherwise, the scaling is done in-situ
1639                         The default is taken from .asaprc (False)
1640
1641        """
1642        if insitu is None: insitu = rcParams['insitu']
1643        self._math._setinsitu(insitu)
1644        varlist = vars()
1645        if not hasattr(tau, "__len__"):
1646            tau = [tau]
1647        s = scantable(self._math._opacity(self, tau))
1648        s._add_history("opacity", varlist)
1649        if insitu: self._assign(s)
1650        else: return s
1651
1652    @asaplog_post_dec
1653    def bin(self, width=5, insitu=None):
1654        """\
1655        Return a scan where all spectra have been binned up.
1656
1657        Parameters:
1658
1659            width:       The bin width (default=5) in pixels
1660
1661            insitu:      if False a new scantable is returned.
1662                         Otherwise, the scaling is done in-situ
1663                         The default is taken from .asaprc (False)
1664
1665        """
1666        if insitu is None: insitu = rcParams['insitu']
1667        self._math._setinsitu(insitu)
1668        varlist = vars()
1669        s = scantable(self._math._bin(self, width))
1670        s._add_history("bin", varlist)
1671        if insitu:
1672            self._assign(s)
1673        else:
1674            return s
1675
1676    @asaplog_post_dec
1677    def resample(self, width=5, method='cubic', insitu=None):
1678        """\
1679        Return a scan where all spectra have been binned up.
1680
1681        Parameters:
1682
1683            width:       The bin width (default=5) in pixels
1684
1685            method:      Interpolation method when correcting from a table.
1686                         Values are  "nearest", "linear", "cubic" (default)
1687                         and "spline"
1688
1689            insitu:      if False a new scantable is returned.
1690                         Otherwise, the scaling is done in-situ
1691                         The default is taken from .asaprc (False)
1692
1693        """
1694        if insitu is None: insitu = rcParams['insitu']
1695        self._math._setinsitu(insitu)
1696        varlist = vars()
1697        s = scantable(self._math._resample(self, method, width))
1698        s._add_history("resample", varlist)
1699        if insitu: self._assign(s)
1700        else: return s
1701
1702    @asaplog_post_dec
1703    def average_pol(self, mask=None, weight='none'):
1704        """\
1705        Average the Polarisations together.
1706
1707        Parameters:
1708
1709            mask:        An optional mask defining the region, where the
1710                         averaging will be applied. The output will have all
1711                         specified points masked.
1712
1713            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1714                         weighted), or 'tsys' (1/Tsys**2 weighted)
1715
1716        """
1717        varlist = vars()
1718        mask = mask or ()
1719        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1720        s._add_history("average_pol", varlist)
1721        return s
1722
1723    @asaplog_post_dec
1724    def average_beam(self, mask=None, weight='none'):
1725        """\
1726        Average the Beams together.
1727
1728        Parameters:
1729            mask:        An optional mask defining the region, where the
1730                         averaging will be applied. The output will have all
1731                         specified points masked.
1732
1733            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1734                         weighted), or 'tsys' (1/Tsys**2 weighted)
1735
1736        """
1737        varlist = vars()
1738        mask = mask or ()
1739        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1740        s._add_history("average_beam", varlist)
1741        return s
1742
    def parallactify(self, pflag):
        """\
        Set a flag to indicate whether this data should be treated as having
        been 'parallactified' (total phase == 0.0)

        Parameters:

            pflag:  Bool indicating whether to turn this on (True) or
                    off (False)

        """
        varlist = vars()
        # delegate to the underlying Scantable implementation; only the
        # flag is changed, the spectra themselves are untouched here
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1757
    @asaplog_post_dec
    def convert_pol(self, poltype=None):
        """\
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.

        Parameters:

            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"

        """
        varlist = vars()
        # conversion is performed by the C++ math server; a new scantable
        # is returned and the original data are left unchanged
        s = scantable(self._math._convertpol(self, poltype))
        s._add_history("convert_pol", varlist)
        return s
1774
    @asaplog_post_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
        """\
        Smooth the spectrum by the specified kernel (conserving flux).

        Parameters:

            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'

            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.

            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.

            plot:       plot the original and the smoothed spectra.
                        In this each individual fit has to be approved, by
                        typing 'y' or 'n'

            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)

        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()

        # keep a copy of the input so rejected rows can be restored below
        if plot: orgscan = self.copy()

        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)

        if plot:
            # interactive approval: overplot the original and smoothed
            # spectrum row by row and let the user accept or reject each
            if rcParams['plotter.gui']:
                from asap.asaplotgui import asaplotgui as asaplot
            else:
                from asap.asaplot import asaplot
            self._p=asaplot()
            self._p.set_panels()
            ylab=s._get_ordinate_label()
            #self._p.palette(0,["#777777","red"])
            for r in xrange(s.nrow()):
                xsm=s._getabcissa(r)
                ysm=s._getspectrum(r)
                xorg=orgscan._getabcissa(r)
                yorg=orgscan._getspectrum(r)
                self._p.clear()
                self._p.hold()
                self._p.set_axes('ylabel',ylab)
                self._p.set_axes('xlabel',s._getabcissalabel(r))
                self._p.set_axes('title',s._getsourcename(r))
                self._p.set_line(label='Original',color="#777777")
                self._p.plot(xorg,yorg)
                self._p.set_line(label='Smoothed',color="red")
                self._p.plot(xsm,ysm)
                ### Ugly part for legend
                for i in [0,1]:
                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
                self._p.release()
                ### Ugly part for legend
                self._p.subplots[0]['lines']=[]
                res = raw_input("Accept smoothing ([y]/n): ")
                if res.upper() == 'N':
                    # rejected: restore the original spectrum for this row
                    s._setspectrum(yorg, r)
            self._p.unmap()
            self._p = None
            del orgscan

        if insitu: self._assign(s)
        else: return s
1852
1853    @asaplog_post_dec
1854    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1855        """\
1856        Return a scan which has been baselined (all rows) by a polynomial.
1857       
1858        Parameters:
1859
1860            mask:       an optional mask
1861
1862            order:      the order of the polynomial (default is 0)
1863
1864            plot:       plot the fit and the residual. In this each
1865                        indivual fit has to be approved, by typing 'y'
1866                        or 'n'
1867
1868            uselin:     use linear polynomial fit
1869
1870            insitu:     if False a new scantable is returned.
1871                        Otherwise, the scaling is done in-situ
1872                        The default is taken from .asaprc (False)
1873
1874            rows:       row numbers of spectra to be processed.
1875                        (default is None: for all rows)
1876       
1877        Example:
1878            # return a scan baselined by a third order polynomial,
1879            # not using a mask
1880            bscan = scan.poly_baseline(order=3)
1881
1882        """
1883        if insitu is None: insitu = rcParams['insitu']
1884        if not insitu:
1885            workscan = self.copy()
1886        else:
1887            workscan = self
1888        varlist = vars()
1889        if mask is None:
1890            mask = [True for i in xrange(self.nchan())]
1891
1892        try:
1893            f = fitter()
1894            if uselin:
1895                f.set_function(lpoly=order)
1896            else:
1897                f.set_function(poly=order)
1898
1899            if rows == None:
1900                rows = xrange(workscan.nrow())
1901            elif isinstance(rows, int):
1902                rows = [ rows ]
1903           
1904            if len(rows) > 0:
1905                self.blpars = []
1906                self.masklists = []
1907                self.actualmask = []
1908           
1909            for r in rows:
1910                f.x = workscan._getabcissa(r)
1911                f.y = workscan._getspectrum(r)
1912                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1913                f.data = None
1914                f.fit()
1915                if plot:
1916                    f.plot(residual=True)
1917                    x = raw_input("Accept fit ( [y]/n ): ")
1918                    if x.upper() == 'N':
1919                        self.blpars.append(None)
1920                        self.masklists.append(None)
1921                        self.actualmask.append(None)
1922                        continue
1923                workscan._setspectrum(f.fitter.getresidual(), r)
1924                self.blpars.append(f.get_parameters())
1925                self.masklists.append(workscan.get_masklist(f.mask, row=r))
1926                self.actualmask.append(f.mask)
1927
1928            if plot:
1929                f._p.unmap()
1930                f._p = None
1931            workscan._add_history("poly_baseline", varlist)
1932            if insitu:
1933                self._assign(workscan)
1934            else:
1935                return workscan
1936        except RuntimeError:
1937            msg = "The fit failed, possibly because it didn't converge."
1938            raise RuntimeError(msg)
1939
1940
1941    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1942        """\
1943        Return a scan which has been baselined (all rows) by a polynomial.
1944        Parameters:
1945            mask:       an optional mask
1946            order:      the order of the polynomial (default is 0)
1947            plot:       plot the fit and the residual. In this each
1948                        indivual fit has to be approved, by typing 'y'
1949                        or 'n'. Ignored if batch = True.
1950            batch:      if True a faster algorithm is used and logs
1951                        including the fit results are not output
1952                        (default is False)
1953            insitu:     if False a new scantable is returned.
1954                        Otherwise, the scaling is done in-situ
1955                        The default is taken from .asaprc (False)
1956            rows:       row numbers of spectra to be processed.
1957                        (default is None: for all rows)
1958        Example:
1959            # return a scan baselined by a third order polynomial,
1960            # not using a mask
1961            bscan = scan.poly_baseline(order=3)
1962        """
1963        if insitu is None: insitu = rcParams["insitu"]
1964        if insitu:
1965            workscan = self
1966        else:
1967            workscan = self.copy()
1968
1969        varlist = vars()
1970        nchan = workscan.nchan()
1971       
1972        if mask is None:
1973            mask = [True for i in xrange(nchan)]
1974
1975        try:
1976            if rows == None:
1977                rows = xrange(workscan.nrow())
1978            elif isinstance(rows, int):
1979                rows = [ rows ]
1980           
1981            if len(rows) > 0:
1982                self.blpars = []
1983                self.masklists = []
1984                self.actualmask = []
1985
1986            if batch:
1987                for r in rows:
1988                    workscan._poly_baseline_batch(mask, order, r)
1989            elif plot:
1990                f = fitter()
1991                f.set_function(lpoly=order)
1992                for r in rows:
1993                    f.x = workscan._getabcissa(r)
1994                    f.y = workscan._getspectrum(r)
1995                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1996                    f.data = None
1997                    f.fit()
1998                   
1999                    f.plot(residual=True)
2000                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2001                    if accept_fit.upper() == "N":
2002                        self.blpars.append(None)
2003                        self.masklists.append(None)
2004                        self.actualmask.append(None)
2005                        continue
2006                    workscan._setspectrum(f.fitter.getresidual(), r)
2007                    self.blpars.append(f.get_parameters())
2008                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
2009                    self.actualmask.append(f.mask)
2010                   
2011                f._p.unmap()
2012                f._p = None
2013            else:
2014                import array
2015                for r in rows:
2016                    pars = array.array("f", [0.0 for i in range(order+1)])
2017                    pars_adr = pars.buffer_info()[0]
2018                    pars_len = pars.buffer_info()[1]
2019                   
2020                    errs = array.array("f", [0.0 for i in range(order+1)])
2021                    errs_adr = errs.buffer_info()[0]
2022                    errs_len = errs.buffer_info()[1]
2023                   
2024                    fmsk = array.array("i", [1 for i in range(nchan)])
2025                    fmsk_adr = fmsk.buffer_info()[0]
2026                    fmsk_len = fmsk.buffer_info()[1]
2027                   
2028                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)
2029                   
2030                    params = pars.tolist()
2031                    fmtd = ""
2032                    for i in xrange(len(params)): fmtd += "  p%d= %3.6f," % (i, params[i])
2033                    fmtd = fmtd[:-1]  # remove trailing ","
2034                    errors = errs.tolist()
2035                    fmask = fmsk.tolist()
2036                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0)    # transform (1/0) -> (True/False)
2037                   
2038                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
2039                    self.masklists.append(workscan.get_masklist(fmask, r))
2040                    self.actualmask.append(fmask)
2041                   
2042                    asaplog.push(str(fmtd))
2043           
2044            workscan._add_history("poly_baseline", varlist)
2045           
2046            if insitu:
2047                self._assign(workscan)
2048            else:
2049                return workscan
2050           
2051        except RuntimeError:
2052            msg = "The fit failed, possibly because it didn't converge."
2053            if rcParams["verbose"]:
2054                asaplog.push(str(msg))
2055                return
2056            else:
2057                raise RuntimeError(msg)
2058
2059
2060    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2061                           threshold=3, chan_avg_limit=1, plot=False,
2062                           insitu=None, rows=None):
2063        """\
2064        Return a scan which has been baselined (all rows) by a polynomial.
2065        Spectral lines are detected first using linefinder and masked out
2066        to avoid them affecting the baseline solution.
2067
2068        Parameters:
2069
2070            mask:       an optional mask retreived from scantable
2071
2072            edge:       an optional number of channel to drop at the edge of
2073                        spectrum. If only one value is
2074                        specified, the same number will be dropped from
2075                        both sides of the spectrum. Default is to keep
2076                        all channels. Nested tuples represent individual
2077                        edge selection for different IFs (a number of spectral
2078                        channels can be different)
2079
2080            order:      the order of the polynomial (default is 0)
2081
2082            threshold:  the threshold used by line finder. It is better to
2083                        keep it large as only strong lines affect the
2084                        baseline solution.
2085
2086            chan_avg_limit:
2087                        a maximum number of consequtive spectral channels to
2088                        average during the search of weak and broad lines.
2089                        The default is no averaging (and no search for weak
2090                        lines). If such lines can affect the fitted baseline
2091                        (e.g. a high order polynomial is fitted), increase this
2092                        parameter (usually values up to 8 are reasonable). Most
2093                        users of this method should find the default value
2094                        sufficient.
2095
2096            plot:       plot the fit and the residual. In this each
2097                        indivual fit has to be approved, by typing 'y'
2098                        or 'n'
2099
2100            insitu:     if False a new scantable is returned.
2101                        Otherwise, the scaling is done in-situ
2102                        The default is taken from .asaprc (False)
2103            rows:       row numbers of spectra to be processed.
2104                        (default is None: for all rows)
2105
2106
2107        Example::
2108
2109            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2110
2111        """
2112        if insitu is None: insitu = rcParams['insitu']
2113        varlist = vars()
2114        from asap.asaplinefind import linefinder
2115        from asap import _is_sequence_or_number as _is_valid
2116
2117        # check whether edge is set up for each IF individually
2118        individualedge = False;
2119        if len(edge) > 1:
2120            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2121                individualedge = True;
2122
2123        if not _is_valid(edge, int) and not individualedge:
2124            raise ValueError, "Parameter 'edge' has to be an integer or a \
2125            pair of integers specified as a tuple. Nested tuples are allowed \
2126            to make individual selection for different IFs."
2127
2128        curedge = (0, 0)
2129        if individualedge:
2130            for edgepar in edge:
2131                if not _is_valid(edgepar, int):
2132                    raise ValueError, "Each element of the 'edge' tuple has \
2133                                       to be a pair of integers or an integer."
2134        else:
2135            curedge = edge;
2136
2137        if not insitu:
2138            workscan = self.copy()
2139        else:
2140            workscan = self
2141
2142        # setup fitter
2143        f = fitter()
2144        f.set_function(lpoly=order)
2145
2146        # setup line finder
2147        fl = linefinder()
2148        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2149
2150        fl.set_scan(workscan)
2151
2152        if mask is None:
2153            mask = _n_bools(workscan.nchan(), True)
2154       
2155        if rows is None:
2156            rows = xrange(workscan.nrow())
2157        elif isinstance(rows, int):
2158            rows = [ rows ]
2159       
2160        # Save parameters of baseline fits & masklists as a class attribute.
2161        # NOTICE: It does not reflect changes in scantable!
2162        if len(rows) > 0:
2163            self.blpars=[]
2164            self.masklists=[]
2165            self.actualmask=[]
2166        asaplog.push("Processing:")
2167        for r in rows:
2168            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2169                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2170                 workscan.getpol(r), workscan.getcycle(r))
2171            asaplog.push(msg, False)
2172
2173            # figure out edge parameter
2174            if individualedge:
2175                if len(edge) >= workscan.getif(r):
2176                    raise RuntimeError, "Number of edge elements appear to " \
2177                                        "be less than the number of IFs"
2178                    curedge = edge[workscan.getif(r)]
2179
2180            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2181
2182            # setup line finder
2183            fl.find_lines(r, actualmask, curedge)
2184           
2185            f.x = workscan._getabcissa(r)
2186            f.y = workscan._getspectrum(r)
2187            f.mask = fl.get_mask()
2188            f.data = None
2189            f.fit()
2190
2191            # Show mask list
2192            masklist=workscan.get_masklist(f.mask, row=r)
2193            msg = "mask range: "+str(masklist)
2194            asaplog.push(msg, False)
2195
2196            if plot:
2197                f.plot(residual=True)
2198                x = raw_input("Accept fit ( [y]/n ): ")
2199                if x.upper() == 'N':
2200                    self.blpars.append(None)
2201                    self.masklists.append(None)
2202                    self.actualmask.append(None)
2203                    continue
2204
2205            workscan._setspectrum(f.fitter.getresidual(), r)
2206            self.blpars.append(f.get_parameters())
2207            self.masklists.append(masklist)
2208            self.actualmask.append(f.mask)
2209        if plot:
2210            f._p.unmap()
2211            f._p = None
2212        workscan._add_history("auto_poly_baseline", varlist)
2213        if insitu:
2214            self._assign(workscan)
2215        else:
2216            return workscan
2217
    @asaplog_post_dec
    def rotate_linpolphase(self, angle):
        """\
        Rotate the phase of the complex polarization O=Q+iU correlation.
        This is always done in situ in the raw data.  So if you call this
        function more than once then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_linpolphase(2.3)

        """
        varlist = vars()
        # in-place operation performed by the C++ math server
        self._math._rotate_linpolphase(self, angle)
        self._add_history("rotate_linpolphase", varlist)
        return
2238
    @asaplog_post_dec
    def rotate_xyphase(self, angle):
        """\
        Rotate the phase of the XY correlation.  This is always done in situ
        in the data.  So if you call this function more than once
        then each call rotates the phase further.

        Parameters:

            angle:   The angle (degrees) to rotate (add) by.

        Example::

            scan.rotate_xyphase(2.3)

        """
        varlist = vars()
        # in-place operation performed by the C++ math server
        self._math._rotate_xyphase(self, angle)
        self._add_history("rotate_xyphase", varlist)
        return
2259
    @asaplog_post_dec
    def swap_linears(self):
        """\
        Swap the linear polarisations XX and YY, or better the first two
        polarisations as this also works for circulars.
        """
        varlist = vars()
        # in-place operation performed by the C++ math server
        self._math._swap_linears(self)
        self._add_history("swap_linears", varlist)
        return
2270
    @asaplog_post_dec
    def invert_phase(self):
        """\
        Invert the phase of the complex polarisation
        """
        varlist = vars()
        # in-place operation performed by the C++ math server
        self._math._invert_phase(self)
        self._add_history("invert_phase", varlist)
        return
2280
2281    @asaplog_post_dec
2282    def add(self, offset, insitu=None):
2283        """\
2284        Return a scan where all spectra have the offset added
2285
2286        Parameters:
2287
2288            offset:      the offset
2289
2290            insitu:      if False a new scantable is returned.
2291                         Otherwise, the scaling is done in-situ
2292                         The default is taken from .asaprc (False)
2293
2294        """
2295        if insitu is None: insitu = rcParams['insitu']
2296        self._math._setinsitu(insitu)
2297        varlist = vars()
2298        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2299        s._add_history("add", varlist)
2300        if insitu:
2301            self._assign(s)
2302        else:
2303            return s
2304
2305    @asaplog_post_dec
2306    def scale(self, factor, tsys=True, insitu=None):
2307        """\
2308
2309        Return a scan where all spectra are scaled by the give 'factor'
2310
2311        Parameters:
2312
2313            factor:      the scaling factor (float or 1D float list)
2314
2315            insitu:      if False a new scantable is returned.
2316                         Otherwise, the scaling is done in-situ
2317                         The default is taken from .asaprc (False)
2318
2319            tsys:        if True (default) then apply the operation to Tsys
2320                         as well as the data
2321
2322        """
2323        if insitu is None: insitu = rcParams['insitu']
2324        self._math._setinsitu(insitu)
2325        varlist = vars()
2326        s = None
2327        import numpy
2328        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2329            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2330                from asapmath import _array2dOp
2331                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2332            else:
2333                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2334        else:
2335            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2336        s._add_history("scale", varlist)
2337        if insitu:
2338            self._assign(s)
2339        else:
2340            return s
2341
2342    def set_sourcetype(self, match, matchtype="pattern",
2343                       sourcetype="reference"):
2344        """\
2345        Set the type of the source to be an source or reference scan
2346        using the provided pattern.
2347
2348        Parameters:
2349
2350            match:          a Unix style pattern, regular expression or selector
2351
2352            matchtype:      'pattern' (default) UNIX style pattern or
2353                            'regex' regular expression
2354
2355            sourcetype:     the type of the source to use (source/reference)
2356
2357        """
2358        varlist = vars()
2359        basesel = self.get_selection()
2360        stype = -1
2361        if sourcetype.lower().startswith("r"):
2362            stype = 1
2363        elif sourcetype.lower().startswith("s"):
2364            stype = 0
2365        else:
2366            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2367        if matchtype.lower().startswith("p"):
2368            matchtype = "pattern"
2369        elif matchtype.lower().startswith("r"):
2370            matchtype = "regex"
2371        else:
2372            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2373        sel = selector()
2374        if isinstance(match, selector):
2375            sel = match
2376        else:
2377            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2378        self.set_selection(basesel+sel)
2379        self._setsourcetype(stype)
2380        self.set_selection(basesel)
2381        self._add_history("set_sourcetype", varlist)
2382
2383    @asaplog_post_dec
2384    @preserve_selection
2385    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2386        """\
2387        This function allows to build quotients automatically.
2388        It assumes the observation to have the same number of
2389        "ons" and "offs"
2390
2391        Parameters:
2392
2393            preserve:       you can preserve (default) the continuum or
2394                            remove it.  The equations used are
2395
2396                            preserve: Output = Toff * (on/off) - Toff
2397
2398                            remove:   Output = Toff * (on/off) - Ton
2399
2400            mode:           the on/off detection mode
2401                            'paired' (default)
2402                            identifies 'off' scans by the
2403                            trailing '_R' (Mopra/Parkes) or
2404                            '_e'/'_w' (Tid) and matches
2405                            on/off pairs from the observing pattern
2406                            'time'
2407                            finds the closest off in time
2408
2409        .. todo:: verify argument is not implemented
2410
2411        """
2412        varlist = vars()
2413        modes = ["time", "paired"]
2414        if not mode in modes:
2415            msg = "please provide valid mode. Valid modes are %s" % (modes)
2416            raise ValueError(msg)
2417        s = None
2418        if mode.lower() == "paired":
2419            sel = self.get_selection()
2420            sel.set_query("SRCTYPE==psoff")
2421            self.set_selection(sel)
2422            offs = self.copy()
2423            sel.set_query("SRCTYPE==pson")
2424            self.set_selection(sel)
2425            ons = self.copy()
2426            s = scantable(self._math._quotient(ons, offs, preserve))
2427        elif mode.lower() == "time":
2428            s = scantable(self._math._auto_quotient(self, mode, preserve))
2429        s._add_history("auto_quotient", varlist)
2430        return s
2431
2432    @asaplog_post_dec
2433    def mx_quotient(self, mask = None, weight='median', preserve=True):
2434        """\
2435        Form a quotient using "off" beams when observing in "MX" mode.
2436
2437        Parameters:
2438
2439            mask:           an optional mask to be used when weight == 'stddev'
2440
2441            weight:         How to average the off beams.  Default is 'median'.
2442
2443            preserve:       you can preserve (default) the continuum or
2444                            remove it.  The equations used are:
2445
2446                                preserve: Output = Toff * (on/off) - Toff
2447
2448                                remove:   Output = Toff * (on/off) - Ton
2449
2450        """
2451        mask = mask or ()
2452        varlist = vars()
2453        on = scantable(self._math._mx_extract(self, 'on'))
2454        preoff = scantable(self._math._mx_extract(self, 'off'))
2455        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2456        from asapmath  import quotient
2457        q = quotient(on, off, preserve)
2458        q._add_history("mx_quotient", varlist)
2459        return q
2460
2461    @asaplog_post_dec
2462    def freq_switch(self, insitu=None):
2463        """\
2464        Apply frequency switching to the data.
2465
2466        Parameters:
2467
2468            insitu:      if False a new scantable is returned.
2469                         Otherwise, the swictching is done in-situ
2470                         The default is taken from .asaprc (False)
2471
2472        """
2473        if insitu is None: insitu = rcParams['insitu']
2474        self._math._setinsitu(insitu)
2475        varlist = vars()
2476        s = scantable(self._math._freqswitch(self))
2477        s._add_history("freq_switch", varlist)
2478        if insitu:
2479            self._assign(s)
2480        else:
2481            return s
2482
    @asaplog_post_dec
    def recalc_azel(self):
        """Recalculate the azimuth and elevation for each position."""
        varlist = vars()
        # recomputation happens in place in the underlying scantable
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
2490
2491    @asaplog_post_dec
2492    def __add__(self, other):
2493        varlist = vars()
2494        s = None
2495        if isinstance(other, scantable):
2496            s = scantable(self._math._binaryop(self, other, "ADD"))
2497        elif isinstance(other, float):
2498            s = scantable(self._math._unaryop(self, other, "ADD", False))
2499        else:
2500            raise TypeError("Other input is not a scantable or float value")
2501        s._add_history("operator +", varlist)
2502        return s
2503
2504    @asaplog_post_dec
2505    def __sub__(self, other):
2506        """
2507        implicit on all axes and on Tsys
2508        """
2509        varlist = vars()
2510        s = None
2511        if isinstance(other, scantable):
2512            s = scantable(self._math._binaryop(self, other, "SUB"))
2513        elif isinstance(other, float):
2514            s = scantable(self._math._unaryop(self, other, "SUB", False))
2515        else:
2516            raise TypeError("Other input is not a scantable or float value")
2517        s._add_history("operator -", varlist)
2518        return s
2519
2520    @asaplog_post_dec
2521    def __mul__(self, other):
2522        """
2523        implicit on all axes and on Tsys
2524        """
2525        varlist = vars()
2526        s = None
2527        if isinstance(other, scantable):
2528            s = scantable(self._math._binaryop(self, other, "MUL"))
2529        elif isinstance(other, float):
2530            s = scantable(self._math._unaryop(self, other, "MUL", False))
2531        else:
2532            raise TypeError("Other input is not a scantable or float value")
2533        s._add_history("operator *", varlist)
2534        return s
2535
2536
2537    @asaplog_post_dec
2538    def __div__(self, other):
2539        """
2540        implicit on all axes and on Tsys
2541        """
2542        varlist = vars()
2543        s = None
2544        if isinstance(other, scantable):
2545            s = scantable(self._math._binaryop(self, other, "DIV"))
2546        elif isinstance(other, float):
2547            if other == 0.0:
2548                raise ZeroDivisionError("Dividing by zero is not recommended")
2549            s = scantable(self._math._unaryop(self, other, "DIV", False))
2550        else:
2551            raise TypeError("Other input is not a scantable or float value")
2552        s._add_history("operator /", varlist)
2553        return s
2554
2555    @asaplog_post_dec
2556    def get_fit(self, row=0):
2557        """\
2558        Print or return the stored fits for a row in the scantable
2559
2560        Parameters:
2561
2562            row:    the row which the fit has been applied to.
2563
2564        """
2565        if row > self.nrow():
2566            return
2567        from asap.asapfit import asapfit
2568        fit = asapfit(self._getfit(row))
2569        asaplog.push( '%s' %(fit) )
2570        return fit.as_dict()
2571
2572    def flag_nans(self):
2573        """\
2574        Utility function to flag NaN values in the scantable.
2575        """
2576        import numpy
2577        basesel = self.get_selection()
2578        for i in range(self.nrow()):
2579            sel = self.get_row_selector(i)
2580            self.set_selection(basesel+sel)
2581            nans = numpy.isnan(self._getspectrum(0))
2582        if numpy.any(nans):
2583            bnans = [ bool(v) for v in nans]
2584            self.flag(bnans)
2585        self.set_selection(basesel)
2586
2587    def get_row_selector(self, rowno):
2588        return selector(beams=self.getbeam(rowno),
2589                        ifs=self.getif(rowno),
2590                        pols=self.getpol(rowno),
2591                        scans=self.getscan(rowno),
2592                        cycles=self.getcycle(rowno))
2593
2594    def _add_history(self, funcname, parameters):
2595        if not rcParams['scantable.history']:
2596            return
2597        # create date
2598        sep = "##"
2599        from datetime import datetime
2600        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2601        hist = dstr+sep
2602        hist += funcname+sep#cdate+sep
2603        if parameters.has_key('self'): del parameters['self']
2604        for k, v in parameters.iteritems():
2605            if type(v) is dict:
2606                for k2, v2 in v.iteritems():
2607                    hist += k2
2608                    hist += "="
2609                    if isinstance(v2, scantable):
2610                        hist += 'scantable'
2611                    elif k2 == 'mask':
2612                        if isinstance(v2, list) or isinstance(v2, tuple):
2613                            hist += str(self._zip_mask(v2))
2614                        else:
2615                            hist += str(v2)
2616                    else:
2617                        hist += str(v2)
2618            else:
2619                hist += k
2620                hist += "="
2621                if isinstance(v, scantable):
2622                    hist += 'scantable'
2623                elif k == 'mask':
2624                    if isinstance(v, list) or isinstance(v, tuple):
2625                        hist += str(self._zip_mask(v))
2626                    else:
2627                        hist += str(v)
2628                else:
2629                    hist += str(v)
2630            hist += sep
2631        hist = hist[:-2] # remove trailing '##'
2632        self._addhistory(hist)
2633
2634
2635    def _zip_mask(self, mask):
2636        mask = list(mask)
2637        i = 0
2638        segments = []
2639        while mask[i:].count(1):
2640            i += mask[i:].index(1)
2641            if mask[i:].count(0):
2642                j = i + mask[i:].index(0)
2643            else:
2644                j = len(mask)
2645            segments.append([i, j])
2646            i = j
2647        return segments
2648
2649    def _get_ordinate_label(self):
2650        fu = "("+self.get_fluxunit()+")"
2651        import re
2652        lbl = "Intensity"
2653        if re.match(".K.", fu):
2654            lbl = "Brightness Temperature "+ fu
2655        elif re.match(".Jy.", fu):
2656            lbl = "Flux density "+ fu
2657        return lbl
2658
2659    def _check_ifs(self):
2660        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2661        nchans = filter(lambda t: t > 0, nchans)
2662        return (sum(nchans)/len(nchans) == nchans[0])
2663
2664    @asaplog_post_dec
2665    def _fill(self, names, unit, average, getpt, antenna):
2666        first = True
2667        fullnames = []
2668        for name in names:
2669            name = os.path.expandvars(name)
2670            name = os.path.expanduser(name)
2671            if not os.path.exists(name):
2672                msg = "File '%s' does not exists" % (name)
2673                raise IOError(msg)
2674            fullnames.append(name)
2675        if average:
2676            asaplog.push('Auto averaging integrations')
2677        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2678        for name in fullnames:
2679            tbl = Scantable(stype)
2680            r = filler(tbl)
2681            rx = rcParams['scantable.reference']
2682            r.setreferenceexpr(rx)
2683            msg = "Importing %s..." % (name)
2684            asaplog.push(msg, False)
2685            opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2686            r.open(name, opts)# antenna, -1, -1, getpt)
2687            r.fill()
2688            if average:
2689                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2690            if not first:
2691                tbl = self._math._merge([self, tbl])
2692            Scantable.__init__(self, tbl)
2693            r.close()
2694            del r, tbl
2695            first = False
2696            #flush log
2697        asaplog.post()
2698        if unit is not None:
2699            self.set_fluxunit(unit)
2700        if not is_casapy():
2701            self.set_freqframe(rcParams['scantable.freqframe'])
2702
2703    def __getitem__(self, key):
2704        if key < 0:
2705            key += self.nrow()
2706        if key >= self.nrow():
2707            raise IndexError("Row index out of range.")
2708        return self._getspectrum(key)
2709
2710    def __setitem__(self, key, value):
2711        if key < 0:
2712            key += self.nrow()
2713        if key >= self.nrow():
2714            raise IndexError("Row index out of range.")
2715        if not hasattr(value, "__len__") or \
2716                len(value) > self.nchan(self.getif(key)):
2717            raise ValueError("Spectrum length doesn't match.")
2718        return self._setspectrum(value, key)
2719
2720    def __len__(self):
2721        return self.nrow()
2722
2723    def __iter__(self):
2724        for i in range(len(self)):
2725            yield self[i]
Note: See TracBrowser for help on using the repository browser.