source: trunk/python/scantable.py @ 1920

Last change on this file since 1920 was 1920, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No?

Module(s): Module Names change impacts.

Description: Describe your changes here...

Modified help texts.
Added 'ms' to the possible suffix values for list_files().


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.9 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
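# A minimal usage sketch for the helpers above (file names are hypothetical):
#
#     if is_scantable('orion.asap'):
#         scan = scantable('orion.asap')
#     elif is_ms('orion.ms'):
#         scan = scantable('orion.ms', getpt=False)
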
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which makes loading of
110                          the MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
114
115            parallactify: Indicate that the data had been parallactified. Default
116                          is taken from rc file.
117
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the outputfile. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (save as ascii text file)
204                            * 'MS2' (save as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   Whether to overwrite the file if it exists.
209                         With the default (False) a warning is issued and
210                         nothing is written. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable where the specified scan number(s) has(have)
257        been dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to
340                         Default - no file output
341
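        Example::

            scan.summary()                    # print via the logger/pager
            scan.summary('mysummary.txt')     # or write to a (hypothetical) file
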
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename == "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for a given row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for a given row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for a given row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
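        Example::

            # scale the spectrum of row 0 by 2 and write it back
            # (assumes 'scan' is an existing scantable)
            spec = [2.0 * v for v in scan.get_spectrum(0)]
            scan.set_spectrum(spec, 0)
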
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])    # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
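        Example::

            # extract integration (row) 5 into its own scantable
            row5 = scan.get_row(5, insitu=False)
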
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example::

520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. pos=0
619
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=msk)
644
645        """
646        return self.stats(stat='stddev', mask=mask)
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
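        Example::

            # temperature of the first integration (keys as listed above)
            t = scan.get_weather(row=0)['temperature']
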
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row)
735
736
737    def get_time(self, row=-1, asdatetime=False):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object for each integration time stamp in the scantable.
741
742        Parameters:
743
744            row:          row no of integration. Default -1 return all rows
745
746            asdatetime:   return values as datetime objects rather than strings
747
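        Example::

            # datetime object for the first integration
            t0 = scan.get_time(row=0, asdatetime=True)
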
748        """
749        from time import strptime
750        from datetime import datetime
751        times = self._get_column(self._gettime, row)
752        if not asdatetime:
753            return times
754        format = "%Y/%m/%d/%H:%M:%S"
755        if isinstance(times, list):
756            return [datetime(*strptime(i, format)[:6]) for i in times]
757        else:
758            return datetime(*strptime(times, format)[:6])
759
760
761    def get_inttime(self, row=-1):
762        """\
763        Get a list of integration times for the observations.
764        Return a time in seconds for each integration in the scantable.
765
766        Parameters:
767
768            row:    row no of integration. Default -1 return all rows.
769
770        """
771        return self._get_column(self._getinttime, row)
772
773
774    def get_sourcename(self, row=-1):
775        """\
776        Get a list of source names for the observations.
777        Return a string for each integration in the scantable.

778        Parameters:
779
780            row:    row no of integration. Default -1 return all rows.
781
782        """
783        return self._get_column(self._getsourcename, row)
784
785    def get_elevation(self, row=-1):
786        """\
787        Get a list of elevations for the observations.
788        Return a float for each integration in the scantable.
789
790        Parameters:
791
792            row:    row no of integration. Default -1 return all rows.
793
794        """
795        return self._get_column(self._getelevation, row)
796
797    def get_azimuth(self, row=-1):
798        """\
799        Get a list of azimuths for the observations.
800        Return a float for each integration in the scantable.
801
802        Parameters:
803            row:    row no of integration. Default -1 return all rows.
804
805        """
806        return self._get_column(self._getazimuth, row)
807
808    def get_parangle(self, row=-1):
809        """\
810        Get a list of parallactic angles for the observations.
811        Return a float for each integration in the scantable.
812
813        Parameters:
814
815            row:    row no of integration. Default -1 return all rows.
816
817        """
818        return self._get_column(self._getparangle, row)
819
820    def get_direction(self, row=-1):
821        """
822        Get a list of Positions on the sky (direction) for the observations.
823        Return a string for each integration in the scantable.
824
825        Parameters:
826
827            row:    row no of integration. Default -1 return all rows
828
829        """
830        return self._get_column(self._getdirection, row)
831
832    def get_directionval(self, row=-1):
833        """\
834        Get a list of Positions on the sky (direction) for the observations.
835        Return a float for each integration in the scantable.
836
837        Parameters:
838
839            row:    row no of integration. Default -1 return all rows
840
841        """
842        return self._get_column(self._getdirectionvec, row)
843
844    @asaplog_post_dec
845    def set_unit(self, unit='channel'):
846        """\
847        Set the unit for all following operations on this scantable
848
849        Parameters:
850
851            unit:    optional unit, default is 'channel'. Use one of '*Hz',
852                     'km/s', 'channel' or equivalent ''
853
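        Example::

            scan.set_unit('GHz')      # work in frequency units
            scan.set_unit('km/s')     # or in velocity
            scan.set_unit()           # back to channels (the default)
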
854        """
855        varlist = vars()
856        if unit in ['', 'pixel', 'channel']:
857            unit = ''
858        inf = list(self._getcoordinfo())
859        inf[0] = unit
860        self._setcoordinfo(inf)
861        self._add_history("set_unit", varlist)
862
863    @asaplog_post_dec
864    def set_instrument(self, instr):
865        """\
866        Set the instrument for subsequent processing.
867
868        Parameters:
869
870            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
871                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
872
873        """
874        self._setInstrument(instr)
875        self._add_history("set_instrument", vars())
876
877    @asaplog_post_dec
878    def set_feedtype(self, feedtype):
879        """\
880        Overwrite the feed type, which might not be set correctly.
881
882        Parameters:
883
884            feedtype:     'linear' or 'circular'
885
886        """
887        self._setfeedtype(feedtype)
888        self._add_history("set_feedtype", vars())
889
890    @asaplog_post_dec
891    def set_doppler(self, doppler='RADIO'):
892        """\
893        Set the doppler for all following operations on this scantable.
894
895        Parameters:
896
897            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
898
899        """
900        varlist = vars()
901        inf = list(self._getcoordinfo())
902        inf[2] = doppler
903        self._setcoordinfo(inf)
904        self._add_history("set_doppler", vars())
905
906    @asaplog_post_dec
907    def set_freqframe(self, frame=None):
908        """\
909        Set the frame type of the Spectral Axis.
910
911        Parameters:
912
913            frame:   an optional frame type, default 'LSRK'. Valid frames are:
914                     'TOPO', 'LSRD', 'LSRK', 'BARY',
915                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
916
917        Example::
918
919            scan.set_freqframe('BARY')
920
921        """
922        frame = frame or rcParams['scantable.freqframe']
923        varlist = vars()
924        # "REST" is not implemented in casacore
925        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
926        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
927        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
928                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
929
930        if frame in valid:
931            inf = list(self._getcoordinfo())
932            inf[1] = frame
933            self._setcoordinfo(inf)
934            self._add_history("set_freqframe", varlist)
935        else:
936            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
937            raise TypeError(msg)
938
939    @asaplog_post_dec
940    def set_dirframe(self, frame=""):
941        """\
942        Set the frame type of the Direction on the sky.
943
944        Parameters:
945
946            frame:   an optional frame type, default ''. Valid frames are:
947                     'J2000', 'B1950', 'GALACTIC'
948
949        Example::
950
951            scan.set_dirframe('GALACTIC')
952
953        """
954        varlist = vars()
955        Scantable.set_dirframe(self, frame)
956        self._add_history("set_dirframe", varlist)
957
958    def get_unit(self):
959        """\
960        Get the default unit set in this scantable
961
962        Returns:
963
964            A unit string
965
966        """
967        inf = self._getcoordinfo()
968        unit = inf[0]
969        if unit == '': unit = 'channel'
970        return unit
971
972    @asaplog_post_dec
973    def get_abcissa(self, rowno=0):
974        """\
975        Get the abcissa in the current coordinate setup for the currently
976        selected Beam/IF/Pol
977
978        Parameters:
979
980            rowno:    an optional row number in the scantable. Default is the
981                      first row, i.e. rowno=0
982
983        Returns:
984
985            The abcissa values and the format string (as a dictionary)
986
987        """
988        abc = self._getabcissa(rowno)
989        lbl = self._getabcissalabel(rowno)
990        return abc, lbl
991
992    @asaplog_post_dec
993    def flag(self, mask=None, unflag=False):
994        """\
995        Flag the selected data using an optional channel mask.
996
997        Parameters:
998
999            mask:   an optional channel mask, created with create_mask. Default
1000                    (no mask) is all channels.
1001
1002            unflag:    if True, unflag the data
1003
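        Example::

            # channel ranges are illustrative; see create_mask for the
            # mask semantics
            msk = scan.create_mask([0, 99])
            scan.flag(mask=msk)
            # flag all channels of the current selection
            scan.flag()
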
1004        """
1005        varlist = vars()
1006        mask = mask or []
1007        self._flag(mask, unflag)
1008        self._add_history("flag", varlist)
1009
1010    @asaplog_post_dec
1011    def flag_row(self, rows=[], unflag=False):
1012        """\
1013        Flag the selected data in row-based manner.
1014
1015        Parameters:
1016
1017            rows:   list of row numbers to be flagged. Default is no row
1018                    (must be explicitly specified to execute row-based flagging).
1019
1020            unflag: if True, unflag the data.
1021
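        Example::

            scan.flag_row([0, 3])                 # flag rows 0 and 3
            scan.flag_row([0, 3], unflag=True)    # ... and unflag them again
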
1022        """
1023        varlist = vars()
1024        self._flag_row(rows, unflag)
1025        self._add_history("flag_row", varlist)
1026
1027    @asaplog_post_dec
1028    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1029        """\
1030        Flag the selected data outside a specified range (channel-based).
1031
1032        Parameters:
1033
1034            uthres:      upper threshold.
1035
1036            dthres:      lower threshold
1037
1038            clipoutside: True for flagging data outside the range [dthres:uthres].
1039                         False for flagging data inside the range.
1040
1041            unflag:      if True, unflag the data.
1042
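        Example::

            # flag data outside the (illustrative) range [-0.5, 3.0]
            scan.clip(uthres=3.0, dthres=-0.5)
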
1043        """
1044        varlist = vars()
1045        self._clip(uthres, dthres, clipoutside, unflag)
1046        self._add_history("clip", varlist)
1047
1048    @asaplog_post_dec
1049    def lag_flag(self, start, end, unit="MHz", insitu=None):
1050        """\
1051        Flag the data in 'lag' space by providing a frequency to remove.
1052        Flagged data in the scantable gets interpolated over the region.
1053        No taper is applied.
1054
1055        Parameters:
1056
1057            start:    the start frequency (really a period within the
1058                      bandwidth)  or period to remove
1059
1060            end:      the end frequency or period to remove
1061
1062            unit:     the frequency unit (default "MHz") or "" for
1063                      explicit lag channels
1064
1065        *Notes*:
1066
1067            It is recommended to flag edges of the band or strong
1068            signals beforehand.
1069
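        Example::

            # remove a ripple in the (hypothetical) 5-6 MHz lag range;
            # a new scantable is returned unless insitu is True
            cleaned = scan.lag_flag(5.0, 6.0, unit='MHz')
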
1070        """
1071        if insitu is None: insitu = rcParams['insitu']
1072        self._math._setinsitu(insitu)
1073        varlist = vars()
1074        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1075        if not (unit == "" or base.has_key(unit)):
1076            raise ValueError("%s is not a valid unit." % unit)
1077        if unit == "":
1078            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1079        else:
1080            s = scantable(self._math._lag_flag(self, start*base[unit],
1081                                               end*base[unit], "frequency"))
1082        s._add_history("lag_flag", varlist)
1083        if insitu:
1084            self._assign(s)
1085        else:
1086            return s
1087
1088    @asaplog_post_dec
1089    def create_mask(self, *args, **kwargs):
1090        """\
1091        Compute and return a mask based on [min, max] windows.
1092        The specified windows are to be INCLUDED, when the mask is
1093        applied.
1094
1095        Parameters:
1096
1097            [min, max], [min2, max2], ...
1098                Pairs of start/end points (inclusive) specifying the regions
1099                to be masked
1100
1101            invert:     optional argument. If specified as True,
1102                        return an inverted mask, i.e. the regions
1103                        specified are EXCLUDED
1104
1105            row:        create the mask using the specified row for
1106                        unit conversions, default is row=0
1107                        only necessary if frequency varies over rows.
1108
1109        Examples::
1110
1111            scan.set_unit('channel')
1112            # a)
1113            msk = scan.create_mask([400, 500], [800, 900])
1114            # the mask is True (included) only between 400 and 500
1115            # and 800 and 900 in the unit 'channel'
1116
1117            # b)
1118            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1119            # masks out (excludes) the regions between 400 and 500
1120            # and 800 and 900 in the unit 'channel'
1121
1122            # c)
1123            #mask only channel 400
1124            msk =  scan.create_mask([400])
1125
1126        """
1127        row = kwargs.get("row", 0)
1128        data = self._getabcissa(row)
1129        u = self._getcoordinfo()[0]
1130        if u == "":
1131            u = "channel"
1132        msg = "The current mask window unit is %s" % u
1133        i = self._check_ifs()
1134        if not i:
1135            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1136        asaplog.push(msg)
1137        n = self.nchan()
1138        msk = _n_bools(n, False)
1139        # test if args is a 'list' or a 'normal *args - UGLY!!!
1140
1141        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1142             and args or args[0]
1143        for window in ws:
1144            if len(window) == 1:
1145                window = [window[0], window[0]]
1146            if len(window) == 0 or  len(window) > 2:
1147                raise ValueError("A window needs to be defined as [start(, end)]")
1148            if window[0] > window[1]:
1149                tmp = window[0]
1150                window[0] = window[1]
1151                window[1] = tmp
1152            for i in range(n):
1153                if data[i] >= window[0] and data[i] <= window[1]:
1154                    msk[i] = True
1155        if kwargs.has_key('invert'):
1156            if kwargs.get('invert'):
1157                msk = mask_not(msk)
1158        return msk
1159
1160    def get_masklist(self, mask=None, row=0):
1161        """\
1162        Compute and return a list of mask windows, [min, max].
1163
1164        Parameters:
1165
1166            mask:       channel mask, created with create_mask.
1167
1168            row:        calculate the masklist using the specified row
1169                        for unit conversions, default is row=0
1170                        only necessary if frequency varies over rows.
1171
1172        Returns:
1173
1174            [min, max], [min2, max2], ...
1175                Pairs of start/end points (inclusive) specifying
1176                the masked regions
1177
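        Example::

            scan.set_unit('channel')
            msk = scan.create_mask([400, 500], [800, 900])
            # returns something like [[400.0, 500.0], [800.0, 900.0]]
            scan.get_masklist(msk)
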
1178        """
1179        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1180            raise TypeError("The mask should be list or tuple.")
1181        if len(mask) < 2:
1182            raise TypeError("The mask should contain more than one element.")
1183        if self.nchan() != len(mask):
1184            msg = "Number of channels in scantable != number of mask elements"
1185            raise TypeError(msg)
1186        data = self._getabcissa(row)
1187        u = self._getcoordinfo()[0]
1188        if u == "":
1189            u = "channel"
1190        msg = "The current mask window unit is %s" % u
1191        i = self._check_ifs()
1192        if not i:
1193            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1194        asaplog.push(msg)
1195        masklist=[]
1196        ist, ien = None, None
1197        ist, ien=self.get_mask_indices(mask)
1198        if ist is not None and ien is not None:
1199            for i in xrange(len(ist)):
1200                range=[data[ist[i]],data[ien[i]]]
1201                range.sort()
1202                masklist.append([range[0],range[1]])
1203        return masklist
1204
1205    def get_mask_indices(self, mask=None):
1206        """\
1207        Compute and return lists of mask start indices and mask end indices.
1208
1209        Parameters:
1210
1211            mask:       channel mask, created with create_mask.
1212
1213        Returns:
1214
1215            List of mask start indices and that of mask end indices,
1216            i.e., [istart1,istart2,....], [iend1,iend2,....].
1217
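        Example::

            msk = scan.create_mask([400, 500])
            istart, iend = scan.get_mask_indices(msk)   # -> [400], [500]
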
1218        """
1219        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1220            raise TypeError("The mask should be list or tuple.")
1221        if len(mask) < 2:
1222            raise TypeError("The mask should contain more than one element.")
1223        istart=[]
1224        iend=[]
1225        if mask[0]: istart.append(0)
1226        for i in range(len(mask)-1):
1227            if not mask[i] and mask[i+1]:
1228                istart.append(i+1)
1229            elif mask[i] and not mask[i+1]:
1230                iend.append(i)
1231        if mask[len(mask)-1]: iend.append(len(mask)-1)
1232        if len(istart) != len(iend):
1233            raise RuntimeError("Numbers of mask start != mask end.")
1234        for i in range(len(istart)):
1235            if istart[i] > iend[i]:
1236                raise RuntimeError("Mask start index > mask end index")
1238        return istart,iend
1239
1240#    def get_restfreqs(self):
1241#        """
1242#        Get the restfrequency(s) stored in this scantable.
1243#        The return value(s) are always of unit 'Hz'
1244#        Parameters:
1245#            none
1246#        Returns:
1247#            a list of doubles
1248#        """
1249#        return list(self._getrestfreqs())
1250
1251    def get_restfreqs(self, ids=None):
1252        """\
1253        Get the restfrequency(s) stored in this scantable.
1254        The return value(s) are always of unit 'Hz'
1255
1256        Parameters:
1257
1258            ids: (optional) a list of MOLECULE_IDs for which the restfrequency(s)
1259                 are to be retrieved
1260
1261        Returns:
1262
1263            dictionary containing ids and a list of doubles for each id
1264
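        Example::

            # restfrequencies for all MOLECULE_IDs in the table
            rfdict = scan.get_restfreqs()
            # restfrequency list for MOLECULE_ID 0 only
            rf0 = scan.get_restfreqs(0)
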
1265        """
1266        if ids is None:
1267            rfreqs={}
1268            idlist = self.getmolnos()
1269            for i in idlist:
1270                rfreqs[i]=list(self._getrestfreqs(i))
1271            return rfreqs
1272        else:
1273            if type(ids)==list or type(ids)==tuple:
1274                rfreqs={}
1275                for i in ids:
1276                    rfreqs[i]=list(self._getrestfreqs(i))
1277                return rfreqs
1278            else:
1279                return list(self._getrestfreqs(ids))
1280            #return list(self._getrestfreqs(ids))
1281
1282    def set_restfreqs(self, freqs=None, unit='Hz'):
1283        """\
1284        Set or replace the restfrequency(s) for the currently selected data.
1285        If the 'freqs' argument holds a scalar,
1286        then that rest frequency will be applied to all the selected
1287        data.  If the 'freqs' argument holds
1288        a vector, then it MUST be of equal or smaller length than
1289        the number of IFs (and the available restfrequencies will be
1290        replaced by this vector).  In this case, *all* data have
1291        the restfrequency set per IF according
1292        to the corresponding value you give in the 'freqs' vector.
1293        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1294        IF 1 gets restfreq 2e9.
1295
1296        You can also specify the frequencies via a linecatalog.
1297
1298        Parameters:
1299
1300            freqs:   list of rest frequency values or string identifiers
1301
1302            unit:    unit for rest frequency (default 'Hz')
1303
1304
1305        Example::
1306
1307            # set the given restfrequency for the all currently selected IFs
1308            scan.set_restfreqs(freqs=1.4e9)
1309            # set restfrequencies for the n IFs  (n > 1) in the order of the
1310            # list, i.e.
1311            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1312            # len(list_of_restfreqs) == nIF
1313            # for nIF == 1 the following will set multiple restfrequencies for
1314            # that IF
1315            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1316            # set multiple restfrequencies per IF, as a list of lists where
1317            # the outer list has nIF elements and the inner ones are arbitrary
1318            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1319
1320       *Note*:
1321
1322            To do more sophisticated restfrequency setting, e.g. on a
1323            source and IF basis, use scantable.set_selection() before using
1324            this function::
1325
1326                # provided your scantable is called scan
1327                selection = selector()
1328                selection.set_name("ORION*")
1329                selection.set_ifs([1])
1330                scan.set_selection(selection)
1331                scan.set_restfreqs(freqs=86.6e9)
1332
1333        """
1334        varlist = vars()
1335        from asap import linecatalog
1336        # simple  value
1337        if isinstance(freqs, int) or isinstance(freqs, float):
1338            self._setrestfreqs([freqs], [""], unit)
1339        # list of values
1340        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1341            # list values are scalars
1342            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1343                if len(freqs) == 1:
1344                    self._setrestfreqs(freqs, [""], unit)
1345                else:
1346                    # allow the 'old' mode of setting multiple IFs
1347                    sel = selector()
1348                    savesel = self._getselection()
1349                    iflist = self.getifnos()
1350                    if len(freqs)>len(iflist):
1351                        raise ValueError("number of elements in the list "
1352                                         "exceeds the current IF selections")
1353                    iflist = self.getifnos()
1354                    for i, fval in enumerate(freqs):
1355                        sel.set_ifs(iflist[i])
1356                        self._setselection(sel)
1357                        self._setrestfreqs([fval], [""], unit)
1358                    self._setselection(savesel)
1359
1360            # list values are dict, {'value'=, 'name'=)
1361            elif isinstance(freqs[-1], dict):
1362                values = []
1363                names = []
1364                for d in freqs:
1365                    values.append(d["value"])
1366                    names.append(d["name"])
1367                self._setrestfreqs(values, names, unit)
1368            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1369                sel = selector()
1370                savesel = self._getselection()
1371                iflist = self.getifnos()
1372                if len(freqs)>len(iflist):
1373                    raise ValueError("number of elements in list of list exceeds"
1374                                     " the current IF selections")
1375                for i, fval in enumerate(freqs):
1376                    sel.set_ifs(iflist[i])
1377                    self._setselection(sel)
1378                    self._setrestfreqs(fval, [""], unit)
1379                self._setselection(savesel)
1380        # freqs are to be taken from a linecatalog
1381        elif isinstance(freqs, linecatalog):
1382            sel = selector()
1383            savesel = self._getselection()
            iflist = self.getifnos()
1384            for i in xrange(freqs.nrow()):
1385                sel.set_ifs(iflist[i])
1386                self._setselection(sel)
1387                self._setrestfreqs([freqs.get_frequency(i)],
1388                                   [freqs.get_name(i)], "MHz")
1389                # ensure that we are not iterating past nIF
1390                if i == self.nif()-1: break
1391            self._setselection(savesel)
1392        else:
1393            return
1394        self._add_history("set_restfreqs", varlist)
1395
1396    def shift_refpix(self, delta):
1397        """\
1398        Shift the reference pixel of the Spectra Coordinate by an
1399        integer amount.
1400
1401        Parameters:
1402
1403            delta:   the amount to shift by
1404
1405        *Note*:
1406
1407            Be careful using this with broadband data.
1408
1409        """
1410        Scantable.shift_refpix(self, delta)
1411
1412    @asaplog_post_dec
1413    def history(self, filename=None):
1414        """\
1415        Print the history. Optionally to a file.
1416
1417        Parameters:
1418
1419            filename:    The name of the file to save the history to.
1420
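        Example::

            scan.history()                       # print via the logger/pager
            scan.history('scan_history.txt')     # or write to a (hypothetical) file
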
1421        """
1422        hist = list(self._gethistory())
1423        out = "-"*80
1424        for h in hist:
1425            if h.startswith("---"):
1426                out = "\n".join([out, h])
1427            else:
1428                items = h.split("##")
1429                date = items[0]
1430                func = items[1]
1431                items = items[2:]
1432                out += "\n"+date+"\n"
1433                out += "Function: %s\n  Parameters:" % (func)
1434                for i in items:
1435                    s = i.split("=")
1436                    out += "\n   %s = %s" % (s[0], s[1])
1437                out = "\n".join([out, "-"*80])
1438        if filename is not None:
1439            if filename == "":
1440                filename = 'scantable_history.txt'
1441            import os
1442            filename = os.path.expandvars(os.path.expanduser(filename))
1443            if not os.path.isdir(filename):
1444                data = open(filename, 'w')
1445                data.write(out)
1446                data.close()
1447            else:
1448                msg = "Illegal file name '%s'." % (filename)
1449                raise IOError(msg)
1450        return page(out)
1451    #
1452    # Maths business
1453    #
1454    @asaplog_post_dec
1455    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1456        """\
1457        Return the (time) weighted average of a scan.
1458
1459        *Note*:
1460
1461            in channels only - align if necessary
1462
1463        Parameters:
1464
1465            mask:     an optional mask (only used for 'var' and 'tsys'
1466                      weighting)
1467
1468            scanav:   True averages each scan separately
1469                      False (default) averages all scans together,
1470
1471            weight:   Weighting scheme.
1472                      'none'     (mean no weight)
1473                      'var'      (1/var(spec) weighted)
1474                      'tsys'     (1/Tsys**2 weighted)
1475                      'tint'     (integration time weighted)
1476                      'tintsys'  (Tint/Tsys**2)
1477                      'median'   ( median averaging)
1478                      The default is 'tint'
1479
1480            align:    align the spectra in velocity before averaging. It takes
1481                      the time of the first spectrum as reference time.
1482
1483        Example::
1484
1485            # time average the scantable without using a mask
1486            newscan = scan.average_time()
1487
1488        """
1489        varlist = vars()
1490        weight = weight or 'TINT'
1491        mask = mask or ()
1492        scanav = (scanav and 'SCAN') or 'NONE'
1493        scan = (self, )
1494
1495        if align:
1496            scan = (self.freq_align(insitu=False), )
1497        s = None
1498        if weight.upper() == 'MEDIAN':
1499            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1500                                                     scanav))
1501        else:
1502            s = scantable(self._math._average(scan, mask, weight.upper(),
1503                          scanav))
1504        s._add_history("average_time", varlist)
1505        return s
1506
1507    @asaplog_post_dec
1508    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1509        """\
1510        Return a scan where all spectra are converted to either
1511        Jansky or Kelvin depending upon the flux units of the scan table.
1512        By default the function tries to look the values up internally.
1513        If it can't find them (or if you want to over-ride), you must
1514        specify EITHER jyperk OR eta (and D which it will try to look up
1515        also if you don't set it). jyperk takes precedence if you set both.
1516
1517        Parameters:
1518
1519            jyperk:      the Jy / K conversion factor
1520
1521            eta:         the aperture efficiency
1522
1523            d:           the geometric diameter (metres)
1524
1525            insitu:      if False a new scantable is returned.
1526                         Otherwise, the scaling is done in-situ
1527                         The default is taken from .asaprc (False)
1528
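        Example::

            # the Jy/K factor is illustrative - use the value appropriate
            # for your telescope
            converted = scan.convert_flux(jyperk=12.7, insitu=False)
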
1529        """
1530        if insitu is None: insitu = rcParams['insitu']
1531        self._math._setinsitu(insitu)
1532        varlist = vars()
1533        jyperk = jyperk or -1.0
1534        d = d or -1.0
1535        eta = eta or -1.0
1536        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1537        s._add_history("convert_flux", varlist)
1538        if insitu: self._assign(s)
1539        else: return s
1540
1541    @asaplog_post_dec
1542    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1543        """\
1544        Return a scan after applying a gain-elevation correction.
1545        The correction can be made via either a polynomial or a
1546        table-based interpolation (and extrapolation if necessary).
1547        You specify polynomial coefficients, an ascii table or neither.
1548        If you specify neither, then a polynomial correction will be made
1549        with built-in coefficients known for certain telescopes (an error
1550        will occur if the instrument is not known).
1551        The data and Tsys are *divided* by the scaling factors.
1552
1553        Parameters:
1554
1555            poly:        Polynomial coefficients (default None) to compute a
1556                         gain-elevation correction as a function of
1557                         elevation (in degrees).
1558
1559            filename:    The name of an ascii file holding correction factors.
1560                         The first row of the ascii file must give the column
1561                         names and these MUST include columns
1562                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1563                         by this) somewhere.
1564                         The second row must give the data type of the
1565                         column. Use 'R' for Real and 'I' for Integer.
1566                         An example file would be
1567                         (actual factors are arbitrary) :
1568
1569                         TIME ELEVATION FACTOR
1570                         R R R
1571                         0.1 0 0.8
1572                         0.2 20 0.85
1573                         0.3 40 0.9
1574                         0.4 60 0.85
1575                         0.5 80 0.8
1576                         0.6 90 0.75
1577
1578            method:      Interpolation method when correcting from a table.
1579                         Values are  "nearest", "linear" (default), "cubic"
1580                         and "spline"
1581
1582            insitu:      if False a new scantable is returned.
1583                         Otherwise, the scaling is done in-situ
1584                         The default is taken from .asaprc (False)
1585
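        Example (illustrative only; 'gainel.txt' is a hypothetical table in
        the format described above)::

            # table-based correction with linear interpolation
            corrected = scan.gain_el(filename='gainel.txt',
                                     method='linear', insitu=False)
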
1586        """
1587
1588        if insitu is None: insitu = rcParams['insitu']
1589        self._math._setinsitu(insitu)
1590        varlist = vars()
1591        poly = poly or ()
1592        from os.path import expandvars
1593        filename = expandvars(filename)
1594        s = scantable(self._math._gainel(self, poly, filename, method))
1595        s._add_history("gain_el", varlist)
1596        if insitu:
1597            self._assign(s)
1598        else:
1599            return s
1600
1601    @asaplog_post_dec
1602    def freq_align(self, reftime=None, method='cubic', insitu=None):
1603        """\
1604        Return a scan where all rows have been aligned in frequency/velocity.
1605        The alignment frequency frame (e.g. LSRK) is that set by function
1606        set_freqframe.
1607
1608        Parameters:
1609
1610            reftime:     reference time to align at. By default, the time of
1611                         the first row of data is used.
1612
1613            method:      Interpolation method for regridding the spectra.
1614                         Choose from "nearest", "linear", "cubic" (default)
1615                         and "spline"
1616
1617            insitu:      if False a new scantable is returned.
1618                         Otherwise, the scaling is done in-situ
1619                         The default is taken from .asaprc (False)
1620
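        Example (illustrative only)::

            # align all rows to the time of the first row of data
            aligned = scan.freq_align(insitu=False)
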
1621        """
1622        if insitu is None: insitu = rcParams["insitu"]
1623        self._math._setinsitu(insitu)
1624        varlist = vars()
1625        reftime = reftime or ""
1626        s = scantable(self._math._freq_align(self, reftime, method))
1627        s._add_history("freq_align", varlist)
1628        if insitu: self._assign(s)
1629        else: return s
1630
1631    @asaplog_post_dec
1632    def opacity(self, tau=None, insitu=None):
1633        """\
1634        Apply an opacity correction. The data
1635        and Tsys are multiplied by the correction factor.
1636
1637        Parameters:
1638
1639            tau:         (list of) opacity from which the correction factor is
1640                         exp(tau*ZD)
1641                         where ZD is the zenith-distance.
1642                         If a list is provided, it has to be of length nIF,
1643                         nIF*nPol or 1 and in order of IF/POL, e.g.
1644                         [opif0pol0, opif0pol1, opif1pol0 ...]
1645                         if tau is `None` the opacities are determined from a
1646                         model.
1647
1648            insitu:      if False a new scantable is returned.
1649                         Otherwise, the scaling is done in-situ
1650                         The default is taken from .asaprc (False)
1651
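        Example (the opacity value is illustrative only)::

            # apply a single zenith opacity to all IFs/polarisations
            corrected = scan.opacity(tau=0.09, insitu=False)
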
1652        """
1653        if insitu is None: insitu = rcParams['insitu']
1654        self._math._setinsitu(insitu)
1655        varlist = vars()
1656        if not hasattr(tau, "__len__"):
1657            tau = [tau]
1658        s = scantable(self._math._opacity(self, tau))
1659        s._add_history("opacity", varlist)
1660        if insitu: self._assign(s)
1661        else: return s
1662
1663    @asaplog_post_dec
1664    def bin(self, width=5, insitu=None):
1665        """\
1666        Return a scan where all spectra have been binned up.
1667
1668        Parameters:
1669
1670            width:       The bin width (default=5) in pixels
1671
1672            insitu:      if False a new scantable is returned.
1673                         Otherwise, the scaling is done in-situ
1674                         The default is taken from .asaprc (False)
1675
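        Example (illustrative only)::

            # bin up the spectra by 10 channels
            binned = scan.bin(width=10, insitu=False)
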
1676        """
1677        if insitu is None: insitu = rcParams['insitu']
1678        self._math._setinsitu(insitu)
1679        varlist = vars()
1680        s = scantable(self._math._bin(self, width))
1681        s._add_history("bin", varlist)
1682        if insitu:
1683            self._assign(s)
1684        else:
1685            return s
1686
1687    @asaplog_post_dec
1688    def resample(self, width=5, method='cubic', insitu=None):
1689        """\
1690        Return a scan where all spectra have been resampled onto a new grid.
1691
1692        Parameters:
1693
1694            width:       The bin width (default=5) in pixels
1695
1696            method:      Interpolation method used for the resampling.
1697                         Values are  "nearest", "linear", "cubic" (default)
1698                         and "spline"
1699
1700            insitu:      if False a new scantable is returned.
1701                         Otherwise, the scaling is done in-situ
1702                         The default is taken from .asaprc (False)
1703
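        Example (illustrative only)::

            # resample by a width of 4 pixels using spline interpolation
            regridded = scan.resample(width=4, method='spline', insitu=False)
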
1704        """
1705        if insitu is None: insitu = rcParams['insitu']
1706        self._math._setinsitu(insitu)
1707        varlist = vars()
1708        s = scantable(self._math._resample(self, method, width))
1709        s._add_history("resample", varlist)
1710        if insitu: self._assign(s)
1711        else: return s
1712
1713    @asaplog_post_dec
1714    def average_pol(self, mask=None, weight='none'):
1715        """\
1716        Average the Polarisations together.
1717
1718        Parameters:
1719
1720            mask:        An optional mask defining the region where the
1721                         averaging will be applied. The output will have all
1722                         specified points masked.
1723
1724            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1725                         weighted), or 'tsys' (1/Tsys**2 weighted)
1726
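        Example (illustrative only)::

            # average the polarisations using 1/Tsys**2 weighting
            polaveraged = scan.average_pol(weight='tsys')
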
1727        """
1728        varlist = vars()
1729        mask = mask or ()
1730        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1731        s._add_history("average_pol", varlist)
1732        return s
1733
1734    @asaplog_post_dec
1735    def average_beam(self, mask=None, weight='none'):
1736        """\
1737        Average the Beams together.
1738
1739        Parameters:
1740            mask:        An optional mask defining the region where the
1741                         averaging will be applied. The output will have all
1742                         specified points masked.
1743
1744            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1745                         weighted), or 'tsys' (1/Tsys**2 weighted)
1746
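        Example (illustrative only)::

            # average the beams using 1/var(spec) weighting
            beamaveraged = scan.average_beam(weight='var')
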
1747        """
1748        varlist = vars()
1749        mask = mask or ()
1750        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1751        s._add_history("average_beam", varlist)
1752        return s
1753
1754    def parallactify(self, pflag):
1755        """\
1756        Set a flag to indicate whether this data should be treated as having
1757        been 'parallactified' (total phase == 0.0)
1758
1759        Parameters:
1760
1761            pflag:  Bool indicating whether to turn this on (True) or
1762                    off (False)
1763
1764        """
1765        varlist = vars()
1766        self._parallactify(pflag)
1767        self._add_history("parallactify", varlist)
1768
1769    @asaplog_post_dec
1770    def convert_pol(self, poltype=None):
1771        """\
1772        Convert the data to a different polarisation type.
1773        Note that you will need cross-polarisation terms for most conversions.
1774
1775        Parameters:
1776
1777            poltype:    The new polarisation type. Valid types are:
1778                        "linear", "circular", "stokes" and "linpol"
1779
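        Example (illustrative only)::

            # convert linears (with cross terms present) to Stokes parameters
            stokescan = scan.convert_pol('stokes')
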
1780        """
1781        varlist = vars()
1782        s = scantable(self._math._convertpol(self, poltype))
1783        s._add_history("convert_pol", varlist)
1784        return s
1785
1786    @asaplog_post_dec
1787    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1788        """\
1789        Smooth the spectrum by the specified kernel (conserving flux).
1790
1791        Parameters:
1792
1793            kernel:     The type of smoothing kernel. Select from
1794                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1795                        or 'poly'
1796
1797            width:      The width of the kernel in pixels. For 'hanning' this is
1798                        ignored; otherwise it defaults to 5 pixels.
1799                        For 'gaussian' it is the Full Width Half
1800                        Maximum. For 'boxcar' it is the full width.
1801                        For 'rmedian' and 'poly' it is the half width.
1802
1803            order:      Optional parameter for 'poly' kernel (default is 2), to
1804                        specify the order of the polynomial. Ignored by all other
1805                        kernels.
1806
1807            plot:       plot the original and the smoothed spectra.
1808                        Each individual fit then has to be approved by
1809                        typing 'y' or 'n'.
1810
1811            insitu:     if False a new scantable is returned.
1812                        Otherwise, the scaling is done in-situ
1813                        The default is taken from .asaprc (False)
1814
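        Example (illustrative only)::

            # boxcar smoothing with a full width of 5 pixels
            smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)
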
1815        """
1816        if insitu is None: insitu = rcParams['insitu']
1817        self._math._setinsitu(insitu)
1818        varlist = vars()
1819
1820        if plot: orgscan = self.copy()
1821
1822        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1823        s._add_history("smooth", varlist)
1824
1825        if plot:
1826            if rcParams['plotter.gui']:
1827                from asap.asaplotgui import asaplotgui as asaplot
1828            else:
1829                from asap.asaplot import asaplot
1830            self._p=asaplot()
1831            self._p.set_panels()
1832            ylab=s._get_ordinate_label()
1833            #self._p.palette(0,["#777777","red"])
1834            for r in xrange(s.nrow()):
1835                xsm=s._getabcissa(r)
1836                ysm=s._getspectrum(r)
1837                xorg=orgscan._getabcissa(r)
1838                yorg=orgscan._getspectrum(r)
1839                self._p.clear()
1840                self._p.hold()
1841                self._p.set_axes('ylabel',ylab)
1842                self._p.set_axes('xlabel',s._getabcissalabel(r))
1843                self._p.set_axes('title',s._getsourcename(r))
1844                self._p.set_line(label='Original',color="#777777")
1845                self._p.plot(xorg,yorg)
1846                self._p.set_line(label='Smoothed',color="red")
1847                self._p.plot(xsm,ysm)
1848                ### Ugly part for legend
1849                for i in [0,1]:
1850                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1851                self._p.release()
1852                ### Ugly part for legend
1853                self._p.subplots[0]['lines']=[]
1854                res = raw_input("Accept smoothing ([y]/n): ")
1855                if res.upper() == 'N':
1856                    s._setspectrum(yorg, r)
1857            self._p.unmap()
1858            self._p = None
1859            del orgscan
1860
1861        if insitu: self._assign(s)
1862        else: return s
1863
1864    @asaplog_post_dec
1865    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1866        """\
1867        Return a scan which has been baselined (all rows) by a polynomial.
1868       
1869        Parameters:
1870
1871            mask:       an optional mask
1872
1873            order:      the order of the polynomial (default is 0)
1874
1875            plot:       plot the fit and the residual. Each individual
1876                        fit has to be approved by typing 'y'
1877                        or 'n'.
1878
1879            uselin:     use linear polynomial fit
1880
1881            insitu:     if False a new scantable is returned.
1882                        Otherwise, the scaling is done in-situ
1883                        The default is taken from .asaprc (False)
1884
1885            rows:       row numbers of spectra to be processed.
1886                        (default is None: for all rows)
1887       
1888        Example:
1889            # return a scan baselined by a third order polynomial,
1890            # not using a mask
1891            bscan = scan.poly_baseline(order=3)
1892
1893        """
1894        if insitu is None: insitu = rcParams['insitu']
1895        if not insitu:
1896            workscan = self.copy()
1897        else:
1898            workscan = self
1899        varlist = vars()
1900        if mask is None:
1901            mask = [True for i in xrange(self.nchan())]
1902
1903        try:
1904            f = fitter()
1905            if uselin:
1906                f.set_function(lpoly=order)
1907            else:
1908                f.set_function(poly=order)
1909
1910            if rows is None:
1911                rows = xrange(workscan.nrow())
1912            elif isinstance(rows, int):
1913                rows = [ rows ]
1914           
1915            if len(rows) > 0:
1916                self.blpars = []
1917                self.masklists = []
1918                self.actualmask = []
1919           
1920            for r in rows:
1921                f.x = workscan._getabcissa(r)
1922                f.y = workscan._getspectrum(r)
1923                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1924                f.data = None
1925                f.fit()
1926                if plot:
1927                    f.plot(residual=True)
1928                    x = raw_input("Accept fit ( [y]/n ): ")
1929                    if x.upper() == 'N':
1930                        self.blpars.append(None)
1931                        self.masklists.append(None)
1932                        self.actualmask.append(None)
1933                        continue
1934                workscan._setspectrum(f.fitter.getresidual(), r)
1935                self.blpars.append(f.get_parameters())
1936                self.masklists.append(workscan.get_masklist(f.mask, row=r))
1937                self.actualmask.append(f.mask)
1938
1939            if plot:
1940                f._p.unmap()
1941                f._p = None
1942            workscan._add_history("poly_baseline", varlist)
1943            if insitu:
1944                self._assign(workscan)
1945            else:
1946                return workscan
1947        except RuntimeError:
1948            msg = "The fit failed, possibly because it didn't converge."
1949            raise RuntimeError(msg)
1950
1951
1952    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1953        """\
1954        Return a scan which has been baselined (all rows) by a polynomial.
1955        Parameters:
1956            mask:       an optional mask
1957            order:      the order of the polynomial (default is 0)
1958            plot:       plot the fit and the residual. Each individual
1959                        fit has to be approved by typing 'y'
1960                        or 'n'. Ignored if batch = True.
1961            batch:      if True a faster algorithm is used and logs
1962                        including the fit results are not output
1963                        (default is False)
1964            insitu:     if False a new scantable is returned.
1965                        Otherwise, the scaling is done in-situ
1966                        The default is taken from .asaprc (False)
1967            rows:       row numbers of spectra to be processed.
1968                        (default is None: for all rows)
1969        Example:
1970            # return a scan baselined by a third order polynomial,
1971            # not using a mask
1972            bscan = scan.poly_baseline(order=3)
1973        """
1974        if insitu is None: insitu = rcParams["insitu"]
1975        if insitu:
1976            workscan = self
1977        else:
1978            workscan = self.copy()
1979
1980        varlist = vars()
1981        nchan = workscan.nchan()
1982       
1983        if mask is None:
1984            mask = [True for i in xrange(nchan)]
1985
1986        try:
1987            if rows is None:
1988                rows = xrange(workscan.nrow())
1989            elif isinstance(rows, int):
1990                rows = [ rows ]
1991           
1992            if len(rows) > 0:
1993                self.blpars = []
1994                self.masklists = []
1995                self.actualmask = []
1996
1997            if batch:
1998                for r in rows:
1999                    workscan._poly_baseline_batch(mask, order, r)
2000            elif plot:
2001                f = fitter()
2002                f.set_function(lpoly=order)
2003                for r in rows:
2004                    f.x = workscan._getabcissa(r)
2005                    f.y = workscan._getspectrum(r)
2006                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2007                    f.data = None
2008                    f.fit()
2009                   
2010                    f.plot(residual=True)
2011                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2012                    if accept_fit.upper() == "N":
2013                        self.blpars.append(None)
2014                        self.masklists.append(None)
2015                        self.actualmask.append(None)
2016                        continue
2017                    workscan._setspectrum(f.fitter.getresidual(), r)
2018                    self.blpars.append(f.get_parameters())
2019                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
2020                    self.actualmask.append(f.mask)
2021                   
2022                f._p.unmap()
2023                f._p = None
2024            else:
2025                import array
2026                for r in rows:
2027                    pars = array.array("f", [0.0 for i in range(order+1)])
2028                    pars_adr = pars.buffer_info()[0]
2029                    pars_len = pars.buffer_info()[1]
2030                   
2031                    errs = array.array("f", [0.0 for i in range(order+1)])
2032                    errs_adr = errs.buffer_info()[0]
2033                    errs_len = errs.buffer_info()[1]
2034                   
2035                    fmsk = array.array("i", [1 for i in range(nchan)])
2036                    fmsk_adr = fmsk.buffer_info()[0]
2037                    fmsk_len = fmsk.buffer_info()[1]
2038                   
2039                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)
2040                   
2041                    params = pars.tolist()
2042                    fmtd = ""
2043                    for i in xrange(len(params)): fmtd += "  p%d= %3.6f," % (i, params[i])
2044                    fmtd = fmtd[:-1]  # remove trailing ","
2045                    errors = errs.tolist()
2046                    fmask = fmsk.tolist()
2047                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0)    # transform (1/0) -> (True/False)
2048                   
2049                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
2050                    self.masklists.append(workscan.get_masklist(fmask, r))
2051                    self.actualmask.append(fmask)
2052                   
2053                    asaplog.push(str(fmtd))
2054           
2055            workscan._add_history("poly_baseline", varlist)
2056           
2057            if insitu:
2058                self._assign(workscan)
2059            else:
2060                return workscan
2061           
2062        except RuntimeError, e:
2063            msg = "The fit failed, possibly because it didn't converge."
2064            if rcParams["verbose"]:
2065                asaplog.push(str(e))
2066                asaplog.push(str(msg))
2067                return
2068            else:
2069                raise RuntimeError(str(e)+'\n'+msg)
2070
2071
2072    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2073                           threshold=3, chan_avg_limit=1, plot=False,
2074                           insitu=None, rows=None):
2075        """\
2076        Return a scan which has been baselined (all rows) by a polynomial.
2077        Spectral lines are detected first using linefinder and masked out
2078        to avoid them affecting the baseline solution.
2079
2080        Parameters:
2081
2082            mask:       an optional mask retrieved from scantable
2083
2084            edge:       an optional number of channels to drop at the edge of
2085                        the spectrum. If only one value is
2086                        specified, the same number will be dropped from
2087                        both sides of the spectrum. Default is to keep
2088                        all channels. Nested tuples represent individual
2089                        edge selection for different IFs (the number of spectral
2090                        channels can differ between IFs)
2091
2092            order:      the order of the polynomial (default is 0)
2093
2094            threshold:  the threshold used by the line finder. It is better to
2095                        keep it large as only strong lines affect the
2096                        baseline solution.
2097
2098            chan_avg_limit:
2099                        a maximum number of consecutive spectral channels to
2100                        average during the search for weak and broad lines.
2101                        The default is no averaging (and no search for weak
2102                        lines). If such lines can affect the fitted baseline
2103                        (e.g. a high order polynomial is fitted), increase this
2104                        parameter (usually values up to 8 are reasonable). Most
2105                        users of this method should find the default value
2106                        sufficient.
2107
2108            plot:       plot the fit and the residual. Each individual
2109                        fit has to be approved by typing 'y'
2110                        or 'n'.
2111
2112            insitu:     if False a new scantable is returned.
2113                        Otherwise, the scaling is done in-situ
2114                        The default is taken from .asaprc (False)
2115            rows:       row numbers of spectra to be processed.
2116                        (default is None: for all rows)
2117
2118
2119        Example::
2120
2121            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2122
2123        """
2124        if insitu is None: insitu = rcParams['insitu']
2125        varlist = vars()
2126        from asap.asaplinefind import linefinder
2127        from asap import _is_sequence_or_number as _is_valid
2128
2129        # check whether edge is set up for each IF individually
2130        individualedge = False
2131        if len(edge) > 1:
2132            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2133                individualedge = True
2134
2135        if not _is_valid(edge, int) and not individualedge:
2136            raise ValueError, "Parameter 'edge' has to be an integer or a \
2137            pair of integers specified as a tuple. Nested tuples are allowed \
2138            to make individual selection for different IFs."
2139
2140        curedge = (0, 0)
2141        if individualedge:
2142            for edgepar in edge:
2143                if not _is_valid(edgepar, int):
2144                    raise ValueError, "Each element of the 'edge' tuple has \
2145                                       to be a pair of integers or an integer."
2146        else:
2147            curedge = edge
2148
2149        if not insitu:
2150            workscan = self.copy()
2151        else:
2152            workscan = self
2153
2154        # setup fitter
2155        f = fitter()
2156        f.set_function(lpoly=order)
2157
2158        # setup line finder
2159        fl = linefinder()
2160        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2161
2162        fl.set_scan(workscan)
2163
2164        if mask is None:
2165            mask = _n_bools(workscan.nchan(), True)
2166       
2167        if rows is None:
2168            rows = xrange(workscan.nrow())
2169        elif isinstance(rows, int):
2170            rows = [ rows ]
2171       
2172        # Save parameters of baseline fits & masklists as a class attribute.
2173        # NOTICE: It does not reflect changes in scantable!
2174        if len(rows) > 0:
2175            self.blpars=[]
2176            self.masklists=[]
2177            self.actualmask=[]
2178        asaplog.push("Processing:")
2179        for r in rows:
2180            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2181                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2182                 workscan.getpol(r), workscan.getcycle(r))
2183            asaplog.push(msg, False)
2184
2185            # figure out edge parameter
2186            if individualedge:
2187                if len(edge) <= workscan.getif(r):
2188                    raise RuntimeError, "Number of edge elements appears to " \
2189                                        "be less than the number of IFs"
2190                curedge = edge[workscan.getif(r)]
2191
2192            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2193
2194            # setup line finder
2195            fl.find_lines(r, actualmask, curedge)
2196           
2197            f.x = workscan._getabcissa(r)
2198            f.y = workscan._getspectrum(r)
2199            f.mask = fl.get_mask()
2200            f.data = None
2201            f.fit()
2202
2203            # Show mask list
2204            masklist=workscan.get_masklist(f.mask, row=r)
2205            msg = "mask range: "+str(masklist)
2206            asaplog.push(msg, False)
2207
2208            if plot:
2209                f.plot(residual=True)
2210                x = raw_input("Accept fit ( [y]/n ): ")
2211                if x.upper() == 'N':
2212                    self.blpars.append(None)
2213                    self.masklists.append(None)
2214                    self.actualmask.append(None)
2215                    continue
2216
2217            workscan._setspectrum(f.fitter.getresidual(), r)
2218            self.blpars.append(f.get_parameters())
2219            self.masklists.append(masklist)
2220            self.actualmask.append(f.mask)
2221        if plot:
2222            f._p.unmap()
2223            f._p = None
2224        workscan._add_history("auto_poly_baseline", varlist)
2225        if insitu:
2226            self._assign(workscan)
2227        else:
2228            return workscan
2229
2230    @asaplog_post_dec
2231    def rotate_linpolphase(self, angle):
2232        """\
2233        Rotate the phase of the complex polarization O=Q+iU correlation.
2234        This is always done in situ in the raw data.  So if you call this
2235        function more than once then each call rotates the phase further.
2236
2237        Parameters:
2238
2239            angle:   The angle (degrees) to rotate (add) by.
2240
2241        Example::
2242
2243            scan.rotate_linpolphase(2.3)
2244
2245        """
2246        varlist = vars()
2247        self._math._rotate_linpolphase(self, angle)
2248        self._add_history("rotate_linpolphase", varlist)
2249        return
2250
2251    @asaplog_post_dec
2252    def rotate_xyphase(self, angle):
2253        """\
2254        Rotate the phase of the XY correlation.  This is always done in situ
2255        in the data.  So if you call this function more than once
2256        then each call rotates the phase further.
2257
2258        Parameters:
2259
2260            angle:   The angle (degrees) to rotate (add) by.
2261
2262        Example::
2263
2264            scan.rotate_xyphase(2.3)
2265
2266        """
2267        varlist = vars()
2268        self._math._rotate_xyphase(self, angle)
2269        self._add_history("rotate_xyphase", varlist)
2270        return
2271
2272    @asaplog_post_dec
2273    def swap_linears(self):
2274        """\
2275        Swap the linear polarisations XX and YY, or more generally the first
2276        two polarisations, as this also works for circulars.
2277        """
2278        varlist = vars()
2279        self._math._swap_linears(self)
2280        self._add_history("swap_linears", varlist)
2281        return
2282
2283    @asaplog_post_dec
2284    def invert_phase(self):
2285        """\
2286        Invert the phase of the complex polarisation
2287        """
2288        varlist = vars()
2289        self._math._invert_phase(self)
2290        self._add_history("invert_phase", varlist)
2291        return
2292
2293    @asaplog_post_dec
2294    def add(self, offset, insitu=None):
2295        """\
2296        Return a scan where all spectra have the offset added
2297
2298        Parameters:
2299
2300            offset:      the offset
2301
2302            insitu:      if False a new scantable is returned.
2303                         Otherwise, the scaling is done in-situ
2304                         The default is taken from .asaprc (False)
2305
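        Example (the offset value is illustrative only)::

            # subtract a constant 1.5 from every spectrum
            newscan = scan.add(-1.5, insitu=False)
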
2306        """
2307        if insitu is None: insitu = rcParams['insitu']
2308        self._math._setinsitu(insitu)
2309        varlist = vars()
2310        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2311        s._add_history("add", varlist)
2312        if insitu:
2313            self._assign(s)
2314        else:
2315            return s
2316
2317    @asaplog_post_dec
2318    def scale(self, factor, tsys=True, insitu=None):
2319        """\
2320
2321        Return a scan where all spectra are scaled by the given 'factor'
2322
2323        Parameters:
2324
2325            factor:      the scaling factor (float or 1D float list)
2326
2327            insitu:      if False a new scantable is returned.
2328                         Otherwise, the scaling is done in-situ
2329                         The default is taken from .asaprc (False)
2330
2331            tsys:        if True (default) then apply the operation to Tsys
2332                         as well as the data
2333
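        Example (the factor is illustrative only)::

            # scale the spectra (and Tsys) by 1.3
            scaled = scan.scale(1.3, tsys=True, insitu=False)
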
2334        """
2335        if insitu is None: insitu = rcParams['insitu']
2336        self._math._setinsitu(insitu)
2337        varlist = vars()
2338        s = None
2339        import numpy
2340        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2341            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2342                from asapmath import _array2dOp
2343                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2344            else:
2345                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2346        else:
2347            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2348        s._add_history("scale", varlist)
2349        if insitu:
2350            self._assign(s)
2351        else:
2352            return s
2353
2354    def set_sourcetype(self, match, matchtype="pattern",
2355                       sourcetype="reference"):
2356        """\
2357        Set the type of the source to be a source or reference scan
2358        using the provided pattern.
2359
2360        Parameters:
2361
2362            match:          a Unix style pattern, regular expression or selector
2363
2364            matchtype:      'pattern' (default) UNIX style pattern or
2365                            'regex' regular expression
2366
2367            sourcetype:     the type of the source to use (source/reference)
2368
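        Example (the pattern is illustrative only)::

            # mark scans whose source name ends in '_R' as reference scans
            scan.set_sourcetype(match='*_R', matchtype='pattern',
                                sourcetype='reference')
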
2369        """
2370        varlist = vars()
2371        basesel = self.get_selection()
2372        stype = -1
2373        if sourcetype.lower().startswith("r"):
2374            stype = 1
2375        elif sourcetype.lower().startswith("s"):
2376            stype = 0
2377        else:
2378            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2379        if matchtype.lower().startswith("p"):
2380            matchtype = "pattern"
2381        elif matchtype.lower().startswith("r"):
2382            matchtype = "regex"
2383        else:
2384            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2385        sel = selector()
2386        if isinstance(match, selector):
2387            sel = match
2388        else:
2389            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2390        self.set_selection(basesel+sel)
2391        self._setsourcetype(stype)
2392        self.set_selection(basesel)
2393        self._add_history("set_sourcetype", varlist)
2394
2395    @asaplog_post_dec
2396    @preserve_selection
2397    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2398        """\
2399        This function allows you to build quotients automatically.
2400        It assumes the observation has the same number of
2401        "ons" and "offs".
2402
2403        Parameters:
2404
2405            preserve:       you can preserve (default) the continuum or
2406                            remove it.  The equations used are
2407
2408                            preserve: Output = Toff * (on/off) - Toff
2409
2410                            remove:   Output = Toff * (on/off) - Ton
2411
2412            mode:           the on/off detection mode
2413                            'paired' (default)
2414                            identifies 'off' scans by the
2415                            trailing '_R' (Mopra/Parkes) or
2416                            '_e'/'_w' (Tid) and matches
2417                            on/off pairs from the observing pattern
2418                            'time'
2419                            finds the closest off in time
2420
2421        .. todo:: verify argument is not implemented
2422
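        Example (illustrative only)::

            # form quotients, pairing offs with ons by observing pattern
            q = scan.auto_quotient(preserve=True, mode='paired')
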
2423        """
2424        varlist = vars()
2425        modes = ["time", "paired"]
2426        if not mode in modes:
2427            msg = "please provide a valid mode. Valid modes are %s" % (modes)
2428            raise ValueError(msg)
2429        s = None
2430        if mode.lower() == "paired":
2431            sel = self.get_selection()
2432            sel.set_query("SRCTYPE==psoff")
2433            self.set_selection(sel)
2434            offs = self.copy()
2435            sel.set_query("SRCTYPE==pson")
2436            self.set_selection(sel)
2437            ons = self.copy()
2438            s = scantable(self._math._quotient(ons, offs, preserve))
2439        elif mode.lower() == "time":
2440            s = scantable(self._math._auto_quotient(self, mode, preserve))
2441        s._add_history("auto_quotient", varlist)
2442        return s
2443
2444    @asaplog_post_dec
2445    def mx_quotient(self, mask = None, weight='median', preserve=True):
2446        """\
2447        Form a quotient using "off" beams when observing in "MX" mode.
2448
2449        Parameters:
2450
2451            mask:           an optional mask to be used when weight == 'stddev'
2452
2453            weight:         How to average the off beams.  Default is 'median'.
2454
2455            preserve:       you can preserve (default) the continuum or
2456                            remove it.  The equations used are:
2457
2458                                preserve: Output = Toff * (on/off) - Toff
2459
2460                                remove:   Output = Toff * (on/off) - Ton
2461
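        Example (illustrative only)::

            # median-average the off beams and preserve the continuum
            q = scan.mx_quotient(weight='median', preserve=True)
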
2462        """
2463        mask = mask or ()
2464        varlist = vars()
2465        on = scantable(self._math._mx_extract(self, 'on'))
2466        preoff = scantable(self._math._mx_extract(self, 'off'))
2467        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2468        from asapmath  import quotient
2469        q = quotient(on, off, preserve)
2470        q._add_history("mx_quotient", varlist)
2471        return q
2472
2473    @asaplog_post_dec
2474    def freq_switch(self, insitu=None):
2475        """\
2476        Apply frequency switching to the data.
2477
2478        Parameters:
2479
2480            insitu:      if False a new scantable is returned.
2481                         Otherwise, the switching is done in-situ
2482                         The default is taken from .asaprc (False)
2483
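        Example (illustrative only)::

            fswitched = scan.freq_switch(insitu=False)
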
2484        """
2485        if insitu is None: insitu = rcParams['insitu']
2486        self._math._setinsitu(insitu)
2487        varlist = vars()
2488        s = scantable(self._math._freqswitch(self))
2489        s._add_history("freq_switch", varlist)
2490        if insitu:
2491            self._assign(s)
2492        else:
2493            return s
2494
2495    @asaplog_post_dec
2496    def recalc_azel(self):
2497        """Recalculate the azimuth and elevation for each position."""
2498        varlist = vars()
2499        self._recalcazel()
2500        self._add_history("recalc_azel", varlist)
2501        return
2502
2503    @asaplog_post_dec
2504    def __add__(self, other):
2505        varlist = vars()
2506        s = None
2507        if isinstance(other, scantable):
2508            s = scantable(self._math._binaryop(self, other, "ADD"))
2509        elif isinstance(other, float):
2510            s = scantable(self._math._unaryop(self, other, "ADD", False))
2511        else:
2512            raise TypeError("Other input is not a scantable or float value")
2513        s._add_history("operator +", varlist)
2514        return s
2515
2516    @asaplog_post_dec
2517    def __sub__(self, other):
2518        """
2519        implicit on all axes and on Tsys
2520        """
2521        varlist = vars()
2522        s = None
2523        if isinstance(other, scantable):
2524            s = scantable(self._math._binaryop(self, other, "SUB"))
2525        elif isinstance(other, float):
2526            s = scantable(self._math._unaryop(self, other, "SUB", False))
2527        else:
2528            raise TypeError("Other input is not a scantable or float value")
2529        s._add_history("operator -", varlist)
2530        return s
2531
2532    @asaplog_post_dec
2533    def __mul__(self, other):
2534        """
2535        implicit on all axes and on Tsys
2536        """
2537        varlist = vars()
2538        s = None
2539        if isinstance(other, scantable):
2540            s = scantable(self._math._binaryop(self, other, "MUL"))
2541        elif isinstance(other, float):
2542            s = scantable(self._math._unaryop(self, other, "MUL", False))
2543        else:
2544            raise TypeError("Other input is not a scantable or float value")
2545        s._add_history("operator *", varlist)
2546        return s
2547
2548
2549    @asaplog_post_dec
2550    def __div__(self, other):
2551        """
2552        implicit on all axes and on Tsys
2553        """
2554        varlist = vars()
2555        s = None
2556        if isinstance(other, scantable):
2557            s = scantable(self._math._binaryop(self, other, "DIV"))
2558        elif isinstance(other, float):
2559            if other == 0.0:
2560                raise ZeroDivisionError("Dividing by zero is not recommended")
2561            s = scantable(self._math._unaryop(self, other, "DIV", False))
2562        else:
2563            raise TypeError("Other input is not a scantable or float value")
2564        s._add_history("operator /", varlist)
2565        return s
2566
2567    @asaplog_post_dec
2568    def get_fit(self, row=0):
2569        """\
2570        Print or return the stored fits for a row in the scantable
2571
2572        Parameters:
2573
2574            row:    the row which the fit has been applied to.
2575
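        Example (illustrative only)::

            # print and retrieve the stored fit for the first row
            fitinfo = scan.get_fit(row=0)
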
2576        """
2577        if row > self.nrow():
2578            return
2579        from asap.asapfit import asapfit
2580        fit = asapfit(self._getfit(row))
2581        asaplog.push( '%s' %(fit) )
2582        return fit.as_dict()
2583
2584    def flag_nans(self):
2585        """\
2586        Utility function to flag NaN values in the scantable.
2587        """
2588        import numpy
2589        basesel = self.get_selection()
2590        for i in range(self.nrow()):
2591            sel = self.get_row_selector(i)
2592            self.set_selection(basesel+sel)
2593            nans = numpy.isnan(self._getspectrum(0))
2594            if numpy.any(nans):
2595                bnans = [ bool(v) for v in nans]
2596                self.flag(bnans)
2597        self.set_selection(basesel)
2598
2599    def get_row_selector(self, rowno):
2600        return selector(beams=self.getbeam(rowno),
2601                        ifs=self.getif(rowno),
2602                        pols=self.getpol(rowno),
2603                        scans=self.getscan(rowno),
2604                        cycles=self.getcycle(rowno))
2605
2606    def _add_history(self, funcname, parameters):
2607        if not rcParams['scantable.history']:
2608            return
2609        # create date
2610        sep = "##"
2611        from datetime import datetime
2612        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2613        hist = dstr+sep
2614        hist += funcname+sep#cdate+sep
2615        if parameters.has_key('self'): del parameters['self']
2616        for k, v in parameters.iteritems():
2617            if type(v) is dict:
2618                for k2, v2 in v.iteritems():
2619                    hist += k2
2620                    hist += "="
2621                    if isinstance(v2, scantable):
2622                        hist += 'scantable'
2623                    elif k2 == 'mask':
2624                        if isinstance(v2, list) or isinstance(v2, tuple):
2625                            hist += str(self._zip_mask(v2))
2626                        else:
2627                            hist += str(v2)
2628                    else:
2629                        hist += str(v2)
2630            else:
2631                hist += k
2632                hist += "="
2633                if isinstance(v, scantable):
2634                    hist += 'scantable'
2635                elif k == 'mask':
2636                    if isinstance(v, list) or isinstance(v, tuple):
2637                        hist += str(self._zip_mask(v))
2638                    else:
2639                        hist += str(v)
2640                else:
2641                    hist += str(v)
2642            hist += sep
2643        hist = hist[:-2] # remove trailing '##'
2644        self._addhistory(hist)
2645
2646
2647    def _zip_mask(self, mask):
2648        mask = list(mask)
2649        i = 0
2650        segments = []
2651        while mask[i:].count(1):
2652            i += mask[i:].index(1)
2653            if mask[i:].count(0):
2654                j = i + mask[i:].index(0)
2655            else:
2656                j = len(mask)
2657            segments.append([i, j])
2658            i = j
2659        return segments
2660
2661    def _get_ordinate_label(self):
2662        fu = "("+self.get_fluxunit()+")"
2663        import re
2664        lbl = "Intensity"
2665        if re.match(".K.", fu):
2666            lbl = "Brightness Temperature "+ fu
2667        elif re.match(".Jy.", fu):
2668            lbl = "Flux density "+ fu
2669        return lbl
2670
2671    def _check_ifs(self):
2672        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2673        nchans = filter(lambda t: t > 0, nchans)
2674        return (sum(nchans)/len(nchans) == nchans[0])
2675
2676    @asaplog_post_dec
2677    #def _fill(self, names, unit, average, getpt, antenna):
2678    def _fill(self, names, unit, average, opts={}):
2679        first = True
2680        fullnames = []
2681        for name in names:
2682            name = os.path.expandvars(name)
2683            name = os.path.expanduser(name)
2684            if not os.path.exists(name):
2685                msg = "File '%s' does not exist" % (name)
2686                raise IOError(msg)
2687            fullnames.append(name)
2688        if average:
2689            asaplog.push('Auto averaging integrations')
2690        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2691        for name in fullnames:
2692            tbl = Scantable(stype)
2693            r = filler(tbl)
2694            rx = rcParams['scantable.reference']
2695            r.setreferenceexpr(rx)
2696            msg = "Importing %s..." % (name)
2697            asaplog.push(msg, False)
2698            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2699            r.open(name, opts)# antenna, -1, -1, getpt)
2700            r.fill()
2701            if average:
2702                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2703            if not first:
2704                tbl = self._math._merge([self, tbl])
2705            Scantable.__init__(self, tbl)
2706            r.close()
2707            del r, tbl
2708            first = False
2709            #flush log
2710        asaplog.post()
2711        if unit is not None:
2712            self.set_fluxunit(unit)
2713        if not is_casapy():
2714            self.set_freqframe(rcParams['scantable.freqframe'])
2715
2716    def __getitem__(self, key):
2717        if key < 0:
2718            key += self.nrow()
2719        if key >= self.nrow():
2720            raise IndexError("Row index out of range.")
2721        return self._getspectrum(key)
2722
2723    def __setitem__(self, key, value):
2724        if key < 0:
2725            key += self.nrow()
2726        if key >= self.nrow():
2727            raise IndexError("Row index out of range.")
2728        if not hasattr(value, "__len__") or \
2729                len(value) > self.nchan(self.getif(key)):
2730            raise ValueError("Spectrum length doesn't match.")
2731        return self._setspectrum(value, key)
2732
2733    def __len__(self):
2734        return self.nrow()
2735
2736    def __iter__(self):
2737        for i in range(len(self)):
2738            yield self[i]