source: trunk/python/scantable.py @ 1928

Last change on this file since 1928 was 1928, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes/No?

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No?

Module(s): Module Names change impacts.

Description: Describe your changes here...

Fixed typo in help and bug in scantable.history().

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.0 KB
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The deafult is False, which makes time to load
110                          the MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
114
115            parallactify: Indicate that the data had been parallactified. Default
116                          is taken from rc file.
117
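        Example (a minimal usage sketch; the file name is hypothetical)::

            from asap import scantable
            # fill an rpfits file, averaging integrations within each scan
            s = scantable('mydata.rpf', average=True)
            # make a reference to an already loaded scantable
            sref = scantable(s)
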
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the output file. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   If True, overwrite the file if it exists.
209                         The default (False) is to return with a warning
210                         without writing the output. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable where the specified scan number(s) has (have)
257        been dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
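        Example (a minimal sketch; the scan numbers are arbitrary)::

            # return a copy without scans 2 and 3
            newscan = scan.drop_scan([2, 3])
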
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to
340                         Default - no file output
341
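        Example (a minimal sketch; the file name is hypothetical)::

            # print the summary to the screen/logger
            scan.summary()
            # additionally write it to a text file
            scan.summary('scan_summary.txt')
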
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename == "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for the current row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for the current row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for the current row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])     # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
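        Example (a minimal sketch; the row number is arbitrary)::

            # extract row 5 as a new single-row scantable
            onerow = scan.get_row(5, insitu=False)
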
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example::

520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. chan=0
619
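        Example (a minimal sketch; row and channel are arbitrary)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit']   # abcissa value and unit
            print qy['value'], qy['unit']   # spectral value and unit
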
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=msk)
644
645        """
646        return self.stats(stat='stddev', mask=mask)
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
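        Example (a minimal sketch)::

            # Tsys for every row in the current selection
            tsys = scan.get_tsys()
            # Tsys for a single row
            tsys0 = scan.get_tsys(0)
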
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
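        Example (a minimal sketch)::

            w = scan.get_weather(0)
            print w['temperature'], w['pressure'], w['humidity']
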
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row)
735
736
737    def get_time(self, row=-1, asdatetime=False):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object or a string (default) for each
741        integration time stamp in the scantable.
742
743        Parameters:
744
745            row:          row no of integration. Default -1 return all rows
746
747            asdatetime:   return values as datetime objects rather than strings
748
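        Example (a minimal sketch)::

            # time stamps as strings for all rows
            stamps = scan.get_time()
            # a single time stamp as a datetime object
            t0 = scan.get_time(0, asdatetime=True)
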
749        """
750        from time import strptime
751        from datetime import datetime
752        times = self._get_column(self._gettime, row)
753        if not asdatetime:
754            return times
755        format = "%Y/%m/%d/%H:%M:%S"
756        if isinstance(times, list):
757            return [datetime(*strptime(i, format)[:6]) for i in times]
758        else:
759            return datetime(*strptime(times, format)[:6])
760
761
762    def get_inttime(self, row=-1):
763        """\
764        Get a list of integration times for the observations.
765        Return a time in seconds for each integration in the scantable.
766
767        Parameters:
768
769            row:    row no of integration. Default -1 return all rows.
770
771        """
772        return self._get_column(self._getinttime, row)
773
774
775    def get_sourcename(self, row=-1):
776        """\
777        Get a list of source names for the observations.
778        Return a string for each integration in the scantable.

779        Parameters:
780
781            row:    row no of integration. Default -1 return all rows.
782
783        """
784        return self._get_column(self._getsourcename, row)
785
786    def get_elevation(self, row=-1):
787        """\
788        Get a list of elevations for the observations.
789        Return a float for each integration in the scantable.
790
791        Parameters:
792
793            row:    row no of integration. Default -1 return all rows.
794
795        """
796        return self._get_column(self._getelevation, row)
797
798    def get_azimuth(self, row=-1):
799        """\
800        Get a list of azimuths for the observations.
801        Return a float for each integration in the scantable.
802
803        Parameters:
804            row:    row no of integration. Default -1 return all rows.
805
806        """
807        return self._get_column(self._getazimuth, row)
808
809    def get_parangle(self, row=-1):
810        """\
811        Get a list of parallactic angles for the observations.
812        Return a float for each integration in the scantable.
813
814        Parameters:
815
816            row:    row no of integration. Default -1 return all rows.
817
818        """
819        return self._get_column(self._getparangle, row)
820
821    def get_direction(self, row=-1):
822        """
823        Get a list of Positions on the sky (direction) for the observations.
824        Return a string for each integration in the scantable.
825
826        Parameters:
827
828            row:    row no of integration. Default -1 return all rows
829
830        """
831        return self._get_column(self._getdirection, row)
832
833    def get_directionval(self, row=-1):
834        """\
835        Get a list of Positions on the sky (direction) for the observations.
836        Return a float for each integration in the scantable.
837
838        Parameters:
839
840            row:    row no of integration. Default -1 return all rows
841
842        """
843        return self._get_column(self._getdirectionvec, row)
844
845    @asaplog_post_dec
846    def set_unit(self, unit='channel'):
847        """\
848        Set the unit for all following operations on this scantable
849
850        Parameters:
851
852            unit:    optional unit, default is 'channel'. Use one of '*Hz',
853                     'km/s', 'channel' or equivalent ''
854
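        Example (a minimal sketch)::

            # work in velocity from now on
            scan.set_unit('km/s')
            # or in frequency
            scan.set_unit('GHz')
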
855        """
856        varlist = vars()
857        if unit in ['', 'pixel', 'channel']:
858            unit = ''
859        inf = list(self._getcoordinfo())
860        inf[0] = unit
861        self._setcoordinfo(inf)
862        self._add_history("set_unit", varlist)
863
864    @asaplog_post_dec
865    def set_instrument(self, instr):
866        """\
867        Set the instrument for subsequent processing.
868
869        Parameters:
870
871            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
872                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
873
874        """
875        self._setInstrument(instr)
876        self._add_history("set_instrument", vars())
877
878    @asaplog_post_dec
879    def set_feedtype(self, feedtype):
880        """\
881        Overwrite the feed type, which might not be set correctly.
882
883        Parameters:
884
885            feedtype:     'linear' or 'circular'
886
887        """
888        self._setfeedtype(feedtype)
889        self._add_history("set_feedtype", vars())
890
891    @asaplog_post_dec
892    def set_doppler(self, doppler='RADIO'):
893        """\
894        Set the doppler for all following operations on this scantable.
895
896        Parameters:
897
898            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
899
900        """
901        varlist = vars()
902        inf = list(self._getcoordinfo())
903        inf[2] = doppler
904        self._setcoordinfo(inf)
905        self._add_history("set_doppler", vars())
906
907    @asaplog_post_dec
908    def set_freqframe(self, frame=None):
909        """\
910        Set the frame type of the Spectral Axis.
911
912        Parameters:
913
914            frame:   an optional frame type, default 'LSRK'. Valid frames are:
915                     'TOPO', 'LSRD', 'LSRK', 'BARY',
916                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
917
918        Example::
919
920            scan.set_freqframe('BARY')
921
922        """
923        frame = frame or rcParams['scantable.freqframe']
924        varlist = vars()
925        # "REST" is not implemented in casacore
926        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
927        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
928        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
929                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
930
931        if frame in valid:
932            inf = list(self._getcoordinfo())
933            inf[1] = frame
934            self._setcoordinfo(inf)
935            self._add_history("set_freqframe", varlist)
936        else:
937            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
938            raise TypeError(msg)
939
940    @asaplog_post_dec
941    def set_dirframe(self, frame=""):
942        """\
943        Set the frame type of the Direction on the sky.
944
945        Parameters:
946
947            frame:   an optional frame type, default ''. Valid frames are:
948                     'J2000', 'B1950', 'GALACTIC'
949
950        Example::
951
952            scan.set_dirframe('GALACTIC')
953
954        """
955        varlist = vars()
956        Scantable.set_dirframe(self, frame)
957        self._add_history("set_dirframe", varlist)
958
959    def get_unit(self):
960        """\
961        Get the default unit set in this scantable
962
963        Returns:
964
965            A unit string
966
967        """
968        inf = self._getcoordinfo()
969        unit = inf[0]
970        if unit == '': unit = 'channel'
971        return unit
972
973    @asaplog_post_dec
974    def get_abcissa(self, rowno=0):
975        """\
976        Get the abcissa in the current coordinate setup for the currently
977        selected Beam/IF/Pol
978
979        Parameters:
980
981            rowno:    an optional row number in the scantable. Default is the
982                      first row, i.e. rowno=0
983
984        Returns:
985
986            The abcissa values and the format string (as a dictionary)
987
988        """
989        abc = self._getabcissa(rowno)
990        lbl = self._getabcissalabel(rowno)
991        return abc, lbl
992
993    @asaplog_post_dec
994    def flag(self, mask=None, unflag=False):
995        """\
996        Flag the selected data using an optional channel mask.
997
998        Parameters:
999
1000            mask:   an optional channel mask, created with create_mask. Default
1001                    (no mask) is all channels.
1002
1003            unflag:    if True, unflag the data
1004
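        Example (a minimal sketch; the channel ranges are arbitrary)::

            # flag two channel ranges in the current selection
            msk = scan.create_mask([1000, 1100], [2000, 2100])
            scan.flag(mask=msk)
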
1005        """
1006        varlist = vars()
1007        mask = mask or []
1008        self._flag(mask, unflag)
1009        self._add_history("flag", varlist)
1010
1011    @asaplog_post_dec
1012    def flag_row(self, rows=[], unflag=False):
1013        """\
1014        Flag the selected data in row-based manner.
1015
1016        Parameters:
1017
1018            rows:   list of row numbers to be flagged. Default is no row
1019                    (must be explicitly specified to execute row-based flagging).
1020
1021            unflag: if True, unflag the data.
1022
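        Example (a minimal sketch; the row numbers are arbitrary)::

            # flag rows 0 and 3
            scan.flag_row([0, 3])
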
1023        """
1024        varlist = vars()
1025        self._flag_row(rows, unflag)
1026        self._add_history("flag_row", varlist)
1027
1028    @asaplog_post_dec
1029    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1030        """\
1031        Flag the selected data outside a specified range (channel-based).
1032
1033        Parameters:
1034
1035            uthres:      upper threshold.
1036
1037            dthres:      lower threshold
1038
1039            clipoutside: True for flagging data outside the range [dthres:uthres].
1040                         False for flagging data inside the range.
1041
1042            unflag:      if True, unflag the data.
1043
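        Example (a minimal sketch; the thresholds are arbitrary)::

            # flag everything outside the range [-1.0, 5.0]
            scan.clip(uthres=5.0, dthres=-1.0)
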
1044        """
1045        varlist = vars()
1046        self._clip(uthres, dthres, clipoutside, unflag)
1047        self._add_history("clip", varlist)
1048
1049    @asaplog_post_dec
1050    def lag_flag(self, start, end, unit="MHz", insitu=None):
1051        """\
1052        Flag the data in 'lag' space by providing a frequency to remove.
1053        Flagged data in the scantable gets interpolated over the region.
1054        No taper is applied.
1055
1056        Parameters:
1057
1058            start:    the start frequency (really a period within the
1059                      bandwidth)  or period to remove
1060
1061            end:      the end frequency or period to remove
1062
1063            unit:     the frequency unit (default "MHz") or "" for
1064                      explicit lag channels
1065
1066        *Notes*:
1067
1068            It is recommended to flag edges of the band or strong
1069            signals beforehand.
1070
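        Example (a minimal sketch; the frequency range is arbitrary)::

            # interpolate over a ripple corresponding to 5.5-6.5 MHz
            cleaned = scan.lag_flag(5.5, 6.5, unit='MHz', insitu=False)
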
1071        """
1072        if insitu is None: insitu = rcParams['insitu']
1073        self._math._setinsitu(insitu)
1074        varlist = vars()
1075        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1076        if not (unit == "" or base.has_key(unit)):
1077            raise ValueError("%s is not a valid unit." % unit)
1078        if unit == "":
1079            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1080        else:
1081            s = scantable(self._math._lag_flag(self, start*base[unit],
1082                                               end*base[unit], "frequency"))
1083        s._add_history("lag_flag", varlist)
1084        if insitu:
1085            self._assign(s)
1086        else:
1087            return s
1088
1089    @asaplog_post_dec
1090    def create_mask(self, *args, **kwargs):
1091        """\
1092        Compute and return a mask based on [min, max] windows.
1093        The specified windows are to be INCLUDED, when the mask is
1094        applied.
1095
1096        Parameters:
1097
1098            [min, max], [min2, max2], ...
1099                Pairs of start/end points (inclusive) specifying the regions
1100                to be masked
1101
1102            invert:     optional argument. If specified as True,
1103                        return an inverted mask, i.e. the regions
1104                        specified are EXCLUDED
1105
1106            row:        create the mask using the specified row for
1107                        unit conversions, default is row=0
1108                        only necessary if frequency varies over rows.
1109
1110        Examples::
1111
1112            scan.set_unit('channel')
1113            # a)
1114            msk = scan.create_mask([400, 500], [800, 900])
1115            # masks everything outside 400 and 500
1116            # and 800 and 900 in the unit 'channel'
1117
1118            # b)
1119            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1120            # masks the regions between 400 and 500
1121            # and 800 and 900 in the unit 'channel'
1122
1123            # c)
1124            #mask only channel 400
1125            msk =  scan.create_mask([400])
1126
1127        """
1128        row = kwargs.get("row", 0)
1129        data = self._getabcissa(row)
1130        u = self._getcoordinfo()[0]
1131        if u == "":
1132            u = "channel"
1133        msg = "The current mask window unit is %s" % u
1134        i = self._check_ifs()
1135        if not i:
1136            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1137        asaplog.push(msg)
1138        n = self.nchan()
1139        msk = _n_bools(n, False)
1140        # test if args is a 'list' or a 'normal' *args - UGLY!!!
1141
1142        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1143             and args or args[0]
1144        for window in ws:
1145            if len(window) == 1:
1146                window = [window[0], window[0]]
1147            if len(window) == 0 or  len(window) > 2:
1148                raise ValueError("A window needs to be defined as [start(, end)]")
1149            if window[0] > window[1]:
1150                tmp = window[0]
1151                window[0] = window[1]
1152                window[1] = tmp
1153            for i in range(n):
1154                if data[i] >= window[0] and data[i] <= window[1]:
1155                    msk[i] = True
1156        if kwargs.has_key('invert'):
1157            if kwargs.get('invert'):
1158                msk = mask_not(msk)
1159        return msk
1160
1161    def get_masklist(self, mask=None, row=0):
1162        """\
1163        Compute and return a list of mask windows, [min, max].
1164
1165        Parameters:
1166
1167            mask:       channel mask, created with create_mask.
1168
1169            row:        calculate the masklist using the specified row
1170                        for unit conversions, default is row=0
1171                        only necessary if frequency varies over rows.
1172
1173        Returns:
1174
1175            [min, max], [min2, max2], ...
1176                Pairs of start/end points (inclusive) specifying
1177                the masked regions
1178
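        Example (a minimal sketch; the windows are arbitrary)::

            msk = scan.create_mask([400, 500], [800, 900])
            # recover the windows in the current unit
            windows = scan.get_masklist(msk)
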
1179        """
1180        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1181            raise TypeError("The mask should be list or tuple.")
1182        if len(mask) < 2:
1183            raise TypeError("The mask should contain more than one element.")
1184        if self.nchan() != len(mask):
1185            msg = "Number of channels in scantable != number of mask elements"
1186            raise TypeError(msg)
1187        data = self._getabcissa(row)
1188        u = self._getcoordinfo()[0]
1189        if u == "":
1190            u = "channel"
1191        msg = "The current mask window unit is %s" % u
1192        i = self._check_ifs()
1193        if not i:
1194            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1195        asaplog.push(msg)
1196        masklist=[]
1197        ist, ien = None, None
1198        ist, ien=self.get_mask_indices(mask)
1199        if ist is not None and ien is not None:
1200            for i in xrange(len(ist)):
1201                range=[data[ist[i]],data[ien[i]]]
1202                range.sort()
1203                masklist.append([range[0],range[1]])
1204        return masklist
1205
1206    def get_mask_indices(self, mask=None):
1207        """\
1208        Compute and Return lists of mask start indices and mask end indices.
1209
1210        Parameters:
1211
1212            mask:       channel mask, created with create_mask.
1213
1214        Returns:
1215
1216            List of mask start indices and that of mask end indices,
1217            i.e., [istart1,istart2,....], [iend1,iend2,....].
1218
1219        """
1220        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1221            raise TypeError("The mask should be list or tuple.")
1222        if len(mask) < 2:
1223            raise TypeError("The mask should contain more than one element.")
1224        istart=[]
1225        iend=[]
1226        if mask[0]: istart.append(0)
1227        for i in range(len(mask)-1):
1228            if not mask[i] and mask[i+1]:
1229                istart.append(i+1)
1230            elif mask[i] and not mask[i+1]:
1231                iend.append(i)
1232        if mask[len(mask)-1]: iend.append(len(mask)-1)
1233        if len(istart) != len(iend):
1234            raise RuntimeError("Numbers of mask start != mask end.")
1235        for i in range(len(istart)):
1236            if istart[i] > iend[i]:
1237                raise RuntimeError("Mask start index > mask end index")
1238                break
1239        return istart,iend
1240
1241#    def get_restfreqs(self):
1242#        """
1243#        Get the restfrequency(s) stored in this scantable.
1244#        The return value(s) are always of unit 'Hz'
1245#        Parameters:
1246#            none
1247#        Returns:
1248#            a list of doubles
1249#        """
1250#        return list(self._getrestfreqs())
1251
1252    def get_restfreqs(self, ids=None):
1253        """\
1254        Get the restfrequency(s) stored in this scantable.
1255        The return value(s) are always of unit 'Hz'
1256
1257        Parameters:
1258
1259            ids: (optional) a list of MOLECULE_ID for which the restfrequency(s)
1260                 are to be retrieved
1261
1262        Returns:
1263
1264            dictionary containing ids and a list of doubles for each id
1265
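        Example (a minimal sketch)::

            # all rest frequencies, keyed by MOLECULE_ID
            rf = scan.get_restfreqs()
            # rest frequencies for a single id
            rf0 = scan.get_restfreqs(0)
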
1266        """
1267        if ids is None:
1268            rfreqs={}
1269            idlist = self.getmolnos()
1270            for i in idlist:
1271                rfreqs[i]=list(self._getrestfreqs(i))
1272            return rfreqs
1273        else:
1274            if type(ids)==list or type(ids)==tuple:
1275                rfreqs={}
1276                for i in ids:
1277                    rfreqs[i]=list(self._getrestfreqs(i))
1278                return rfreqs
1279            else:
1280                return list(self._getrestfreqs(ids))
1281            #return list(self._getrestfreqs(ids))
1282
1283    def set_restfreqs(self, freqs=None, unit='Hz'):
1284        """\
1285        Set or replace the restfrequency.
1286        If the 'freqs' argument holds a scalar,
1287        then that rest frequency will be applied to all the selected
1288        data.  If the 'freqs' argument holds
1289        a vector, then it MUST be of equal or smaller length than
1290        the number of IFs (and the available restfrequencies will be
1291        replaced by this vector).  In this case, *all* data have
1292        the restfrequency set per IF according
1293        to the corresponding value you give in the 'freqs' vector.
1294        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1295        IF 1 gets restfreq 2e9.
1296
1297        You can also specify the frequencies via a linecatalog.
1298
1299        Parameters:
1300
1301            freqs:   list of rest frequency values or string identifiers
1302
1303            unit:    unit for rest frequency (default 'Hz')
1304
1305
1306        Example::
1307
1308            # set the given restfrequency for all currently selected IFs
1309            scan.set_restfreqs(freqs=1.4e9)
1310            # set restfrequencies for the n IFs  (n > 1) in the order of the
1311            # list, i.e
1312            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1313            # len(list_of_restfreqs) == nIF
1314            # for nIF == 1 the following will set multiple restfrequencies for
1315            # that IF
1316            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1317            # set multiple restfrequencies per IF, as a list of lists where
1318            # the outer list has nIF elements and the inner lists are arbitrary
1319            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1320
1321       *Note*:
1322
1323            To do more sophisticated restfrequency setting, e.g. on a
1324            source and IF basis, use scantable.set_selection() before using
1325            this function::
1326
1327                # provided your scantable is called scan
1328                selection = selector()
1329                selection.set_name("ORION*")
1330                selection.set_ifs([1])
1331                scan.set_selection(selection)
1332                scan.set_restfreqs(freqs=86.6e9)
1333
1334        """
1335        varlist = vars()
1336        from asap import linecatalog
1337        # simple  value
1338        if isinstance(freqs, int) or isinstance(freqs, float):
1339            self._setrestfreqs([freqs], [""], unit)
1340        # list of values
1341        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1342            # list values are scalars
1343            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1344                if len(freqs) == 1:
1345                    self._setrestfreqs(freqs, [""], unit)
1346                else:
1347                    # allow the 'old' mode of setting multiple IFs
1348                    sel = selector()
1349                    savesel = self._getselection()
1350                    iflist = self.getifnos()
1351                    if len(freqs)>len(iflist):
1352                        raise ValueError("number of elements in list of list "
1353                                         "exceeds the current IF selections")
1354                    iflist = self.getifnos()
1355                    for i, fval in enumerate(freqs):
1356                        sel.set_ifs(iflist[i])
1357                        self._setselection(sel)
1358                        self._setrestfreqs([fval], [""], unit)
1359                    self._setselection(savesel)
1360
1361            # list values are dict, {'value'=, 'name'=)
1362            elif isinstance(freqs[-1], dict):
1363                values = []
1364                names = []
1365                for d in freqs:
1366                    values.append(d["value"])
1367                    names.append(d["name"])
1368                self._setrestfreqs(values, names, unit)
1369            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1370                sel = selector()
1371                savesel = self._getselection()
1372                iflist = self.getifnos()
1373                if len(freqs)>len(iflist):
1374                    raise ValueError("number of elements in list of list exceeds"
1375                                     " the current IF selections")
1376                for i, fval in enumerate(freqs):
1377                    sel.set_ifs(iflist[i])
1378                    self._setselection(sel)
1379                    self._setrestfreqs(fval, [""], unit)
1380                self._setselection(savesel)
1381        # freqs are to be taken from a linecatalog
1382        elif isinstance(freqs, linecatalog):
1383            sel = selector()
1384            savesel = self._getselection()
1385            for i in xrange(freqs.nrow()):
1386                sel.set_ifs(self.getifnos()[i])
1387                self._setselection(sel)
1388                self._setrestfreqs([freqs.get_frequency(i)],
1389                                   [freqs.get_name(i)], "MHz")
1390                # ensure that we are not iterating past nIF
1391                if i == self.nif()-1: break
1392            self._setselection(savesel)
1393        else:
1394            return
1395        self._add_history("set_restfreqs", varlist)
1396
1397    def shift_refpix(self, delta):
1398        """\
1399        Shift the reference pixel of the Spectra Coordinate by an
1400        integer amount.
1401
1402        Parameters:
1403
1404            delta:   the amount to shift by
1405
1406        *Note*:
1407
1408            Be careful using this with broadband data.
1409
1410        """
1411        Scantable.shift_refpix(self, delta)
1412
1413    @asaplog_post_dec
1414    def history(self, filename=None):
1415        """\
1416        Print the history. Optionally to a file.
1417
1418        Parameters:
1419
1420            filename:    The name of the file to save the history to.
1421
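        Example (a minimal sketch; the file name is hypothetical)::

            # print the history to the screen/logger
            scan.history()
            # or write it to a file
            scan.history('scan_history.txt')
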
1422        """
1423        hist = list(self._gethistory())
1424        out = "-"*80
1425        for h in hist:
1426            if h.startswith("---"):
1427                out = "\n".join([out, h])
1428            else:
1429                items = h.split("##")
1430                date = items[0]
1431                func = items[1]
1432                items = items[2:]
1433                out += "\n"+date+"\n"
1434                out += "Function: %s\n  Parameters:" % (func)
1435                for i in items:
1436                    if i == '':
1437                        continue
1438                    s = i.split("=")
1439                    out += "\n   %s = %s" % (s[0], s[1])
1440                out = "\n".join([out, "-"*80])
1441        if filename is not None:
1442            if filename == "":
1443                filename = 'scantable_history.txt'
1444            import os
1445            filename = os.path.expandvars(os.path.expanduser(filename))
1446            if not os.path.isdir(filename):
1447                data = open(filename, 'w')
1448                data.write(out)
1449                data.close()
1450            else:
1451                msg = "Illegal file name '%s'." % (filename)
1452                raise IOError(msg)
1453        return page(out)
1454    #
1455    # Maths business
1456    #
1457    @asaplog_post_dec
1458    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1459        """\
1460        Return the (time) weighted average of a scan.
1461
1462        *Note*:
1463
1464            in channels only - align if necessary
1465
1466        Parameters:
1467
1468            mask:     an optional mask (only used for 'var' and 'tsys'
1469                      weighting)
1470
1471            scanav:   True averages each scan separately
1472                      False (default) averages all scans together,
1473
1474            weight:   Weighting scheme.
1475                      'none'     (mean no weight)
1476                      'var'      (1/var(spec) weighted)
1477                      'tsys'     (1/Tsys**2 weighted)
1478                      'tint'     (integration time weighted)
1479                      'tintsys'  (Tint/Tsys**2)
1480                      'median'   ( median averaging)
1481                      The default is 'tint'
1482
1483            align:    align the spectra in velocity before averaging. It takes
1484                      the time of the first spectrum as reference time.
1485
1486        Example::
1487
1488            # time average the scantable without using a mask
1489            newscan = scan.average_time()
1490
1491        """
1492        varlist = vars()
1493        weight = weight or 'TINT'
1494        mask = mask or ()
1495        scanav = (scanav and 'SCAN') or 'NONE'
1496        scan = (self, )
1497
1498        if align:
1499            scan = (self.freq_align(insitu=False), )
1500        s = None
1501        if weight.upper() == 'MEDIAN':
1502            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1503                                                     scanav))
1504        else:
1505            s = scantable(self._math._average(scan, mask, weight.upper(),
1506                          scanav))
1507        s._add_history("average_time", varlist)
1508        return s
1509
1510    @asaplog_post_dec
1511    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1512        """\
1513        Return a scan where all spectra are converted to either
1514        Jansky or Kelvin depending upon the flux units of the scan table.
1515        By default the function tries to look the values up internally.
1516        If it can't find them (or if you want to over-ride), you must
1517        specify EITHER jyperk OR eta (and D which it will try to look up
1518        also if you don't set it). jyperk takes precedence if you set both.
1519
1520        Parameters:
1521
1522            jyperk:      the Jy / K conversion factor
1523
1524            eta:         the aperture efficiency
1525
1526            d:           the geometric diameter (metres)
1527
1528            insitu:      if False a new scantable is returned.
1529                         Otherwise, the scaling is done in-situ
1530                         The default is taken from .asaprc (False)
1531
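        Example (an illustrative sketch; 'scan' is assumed to be an existing
        scantable and the efficiency/diameter values are arbitrary)::

            # convert using an assumed aperture efficiency and dish diameter
            jyscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)
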
1532        """
1533        if insitu is None: insitu = rcParams['insitu']
1534        self._math._setinsitu(insitu)
1535        varlist = vars()
1536        jyperk = jyperk or -1.0
1537        d = d or -1.0
1538        eta = eta or -1.0
1539        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1540        s._add_history("convert_flux", varlist)
1541        if insitu: self._assign(s)
1542        else: return s
1543
1544    @asaplog_post_dec
1545    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1546        """\
1547        Return a scan after applying a gain-elevation correction.
1548        The correction can be made via either a polynomial or a
1549        table-based interpolation (and extrapolation if necessary).
1550        You specify polynomial coefficients, an ascii table or neither.
1551        If you specify neither, then a polynomial correction will be made
1552        with built-in coefficients known for certain telescopes (an error
1553        will occur if the instrument is not known).
1554        The data and Tsys are *divided* by the scaling factors.
1555
1556        Parameters:
1557
1558            poly:        Polynomial coefficients (default None) to compute a
1559                         gain-elevation correction as a function of
1560                         elevation (in degrees).
1561
1562            filename:    The name of an ascii file holding correction factors.
1563                         The first row of the ascii file must give the column
1564                         names and these MUST include columns
1565                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1566                         by this) somewhere.
1567                         The second row must give the data type of the
1568                         column. Use 'R' for Real and 'I' for Integer.
1569                         An example file would be
1570                         (actual factors are arbitrary) :
1571
1572                         TIME ELEVATION FACTOR
1573                         R R R
1574                         0.1 0 0.8
1575                         0.2 20 0.85
1576                         0.3 40 0.9
1577                         0.4 60 0.85
1578                         0.5 80 0.8
1579                         0.6 90 0.75
1580
1581            method:      Interpolation method when correcting from a table.
1582                         Values are  "nearest", "linear" (default), "cubic"
1583                         and "spline"
1584
1585            insitu:      if False a new scantable is returned.
1586                         Otherwise, the scaling is done in-situ
1587                         The default is taken from .asaprc (False)
1588
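        Example (an illustrative sketch; 'scan' is an existing scantable and
        'gainel.txt' is a hypothetical ascii table in the format shown above)::

            # apply a table-based gain-elevation correction
            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
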
1589        """
1590
1591        if insitu is None: insitu = rcParams['insitu']
1592        self._math._setinsitu(insitu)
1593        varlist = vars()
1594        poly = poly or ()
1595        from os.path import expandvars
1596        filename = expandvars(filename)
1597        s = scantable(self._math._gainel(self, poly, filename, method))
1598        s._add_history("gain_el", varlist)
1599        if insitu:
1600            self._assign(s)
1601        else:
1602            return s
1603
1604    @asaplog_post_dec
1605    def freq_align(self, reftime=None, method='cubic', insitu=None):
1606        """\
1607        Return a scan where all rows have been aligned in frequency/velocity.
1608        The alignment frequency frame (e.g. LSRK) is that set by function
1609        set_freqframe.
1610
1611        Parameters:
1612
1613            reftime:     reference time to align at. By default, the time of
1614                         the first row of data is used.
1615
1616            method:      Interpolation method for regridding the spectra.
1617                         Choose from "nearest", "linear", "cubic" (default)
1618                         and "spline"
1619
1620            insitu:      if False a new scantable is returned.
1621                         Otherwise, the scaling is done in-situ
1622                         The default is taken from .asaprc (False)
1623
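        Example (an illustrative sketch; 'scan' is an existing scantable)::

            # align all rows to the time of the first integration
            aligned = scan.freq_align(insitu=False)
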
1624        """
1625        if insitu is None: insitu = rcParams["insitu"]
1626        self._math._setinsitu(insitu)
1627        varlist = vars()
1628        reftime = reftime or ""
1629        s = scantable(self._math._freq_align(self, reftime, method))
1630        s._add_history("freq_align", varlist)
1631        if insitu: self._assign(s)
1632        else: return s
1633
1634    @asaplog_post_dec
1635    def opacity(self, tau=None, insitu=None):
1636        """\
1637        Apply an opacity correction. The data
1638        and Tsys are multiplied by the correction factor.
1639
1640        Parameters:
1641
1642            tau:         (list of) opacity from which the correction factor is
1643                         exp(tau*ZD)
1644                         where ZD is the zenith-distance.
1645                         If a list is provided, it has to be of length nIF,
1646                         nIF*nPol or 1 and in order of IF/POL, e.g.
1647                         [opif0pol0, opif0pol1, opif1pol0 ...]
1648                         if tau is `None` the opacities are determined from a
1649                         model.
1650
1651            insitu:      if False a new scantable is returned.
1652                         Otherwise, the scaling is done in-situ
1653                         The default is taken from .asaprc (False)
1654
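        Example (an illustrative sketch; 'scan' is an existing scantable and
        the opacity value is arbitrary)::

            # correct for an assumed zenith opacity of 0.09
            corrected = scan.opacity(tau=0.09, insitu=False)
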
1655        """
1656        if insitu is None: insitu = rcParams['insitu']
1657        self._math._setinsitu(insitu)
1658        varlist = vars()
1659        if not hasattr(tau, "__len__"):
1660            tau = [tau]
1661        s = scantable(self._math._opacity(self, tau))
1662        s._add_history("opacity", varlist)
1663        if insitu: self._assign(s)
1664        else: return s
1665
1666    @asaplog_post_dec
1667    def bin(self, width=5, insitu=None):
1668        """\
1669        Return a scan where all spectra have been binned up.
1670
1671        Parameters:
1672
1673            width:       The bin width (default=5) in pixels
1674
1675            insitu:      if False a new scantable is returned.
1676                         Otherwise, the scaling is done in-situ
1677                         The default is taken from .asaprc (False)
1678
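        Example (an illustrative sketch; 'scan' is an existing scantable)::

            # bin the spectra into 10-channel wide bins
            binned = scan.bin(width=10, insitu=False)
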
1679        """
1680        if insitu is None: insitu = rcParams['insitu']
1681        self._math._setinsitu(insitu)
1682        varlist = vars()
1683        s = scantable(self._math._bin(self, width))
1684        s._add_history("bin", varlist)
1685        if insitu:
1686            self._assign(s)
1687        else:
1688            return s
1689
1690    @asaplog_post_dec
1691    def resample(self, width=5, method='cubic', insitu=None):
1692        """\
1693        Return a scan where all spectra have been resampled onto a new channel grid by interpolation.
1694
1695        Parameters:
1696
1697            width:       The bin width (default=5) in pixels
1698
1699            method:      Interpolation method used for the resampling.
1700                         Values are "nearest", "linear", "cubic" (default)
1701                         and "spline"
1702
1703            insitu:      if False a new scantable is returned.
1704                         Otherwise, the scaling is done in-situ
1705                         The default is taken from .asaprc (False)
1706
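        Example (an illustrative sketch; 'scan' is an existing scantable)::

            # resample onto 4-pixel wide channels using spline interpolation
            regridded = scan.resample(width=4, method='spline', insitu=False)
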
1707        """
1708        if insitu is None: insitu = rcParams['insitu']
1709        self._math._setinsitu(insitu)
1710        varlist = vars()
1711        s = scantable(self._math._resample(self, method, width))
1712        s._add_history("resample", varlist)
1713        if insitu: self._assign(s)
1714        else: return s
1715
1716    @asaplog_post_dec
1717    def average_pol(self, mask=None, weight='none'):
1718        """\
1719        Average the Polarisations together.
1720
1721        Parameters:
1722
1723            mask:        An optional mask defining the region where the
1724                         averaging will be applied. The output will have all
1725                         specified points masked.
1726
1727            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1728                         weighted), or 'tsys' (1/Tsys**2 weighted)
1729
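        Example (an illustrative sketch; 'scan' is an existing scantable)::

            # average the polarisations with 1/Tsys**2 weighting
            polav = scan.average_pol(weight='tsys')
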
1730        """
1731        varlist = vars()
1732        mask = mask or ()
1733        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1734        s._add_history("average_pol", varlist)
1735        return s
1736
1737    @asaplog_post_dec
1738    def average_beam(self, mask=None, weight='none'):
1739        """\
1740        Average the Beams together.
1741
1742        Parameters:
1743            mask:        An optional mask defining the region where the
1744                         averaging will be applied. The output will have all
1745                         specified points masked.
1746
1747            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1748                         weighted), or 'tsys' (1/Tsys**2 weighted)
1749
1750        """
1751        varlist = vars()
1752        mask = mask or ()
1753        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1754        s._add_history("average_beam", varlist)
1755        return s
1756
1757    def parallactify(self, pflag):
1758        """\
1759        Set a flag to indicate whether this data should be treated as having
1760        been 'parallactified' (total phase == 0.0)
1761
1762        Parameters:
1763
1764            pflag:  Bool indicating whether to turn this on (True) or
1765                    off (False)
1766
1767        """
1768        varlist = vars()
1769        self._parallactify(pflag)
1770        self._add_history("parallactify", varlist)
1771
1772    @asaplog_post_dec
1773    def convert_pol(self, poltype=None):
1774        """\
1775        Convert the data to a different polarisation type.
1776        Note that you will need cross-polarisation terms for most conversions.
1777
1778        Parameters:
1779
1780            poltype:    The new polarisation type. Valid types are:
1781                        "linear", "circular", "stokes" and "linpol"
1782
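        Example (an illustrative sketch; 'scan' is assumed to hold linear
        polarisation data including the cross-polarisation terms)::

            # convert the linears to Stokes parameters
            stokescan = scan.convert_pol("stokes")
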
1783        """
1784        varlist = vars()
1785        s = scantable(self._math._convertpol(self, poltype))
1786        s._add_history("convert_pol", varlist)
1787        return s
1788
1789    @asaplog_post_dec
1790    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1791        """\
1792        Smooth the spectrum by the specified kernel (conserving flux).
1793
1794        Parameters:
1795
1796            kernel:     The type of smoothing kernel. Select from
1797                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1798                        or 'poly'
1799
1800            width:      The width of the kernel in pixels (default is 5).
1801                        It is ignored for the 'hanning' kernel.
1802                        For 'gaussian' it is the Full Width at Half
1803                        Maximum. For 'boxcar' it is the full width.
1804                        For 'rmedian' and 'poly' it is the half width.
1805
1806            order:      Optional parameter for the 'poly' kernel (default is 2),
1807                        to specify the order of the polynomial. Ignored by all
1808                        other kernels.
1809
1810            plot:       plot the original and the smoothed spectra.
1811                        In this mode each individual result has to be
1812                        approved, by typing 'y' or 'n'.
1813
1814            insitu:     if False a new scantable is returned.
1815                        Otherwise, the scaling is done in-situ
1816                        The default is taken from .asaprc (False)
1817
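        Example (an illustrative sketch; 'scan' is an existing scantable and
        the kernel width is arbitrary)::

            # smooth with a gaussian of FWHM 10 pixels
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
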
1818        """
1819        if insitu is None: insitu = rcParams['insitu']
1820        self._math._setinsitu(insitu)
1821        varlist = vars()
1822
1823        if plot: orgscan = self.copy()
1824
1825        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1826        s._add_history("smooth", varlist)
1827
1828        if plot:
1829            if rcParams['plotter.gui']:
1830                from asap.asaplotgui import asaplotgui as asaplot
1831            else:
1832                from asap.asaplot import asaplot
1833            self._p=asaplot()
1834            self._p.set_panels()
1835            ylab=s._get_ordinate_label()
1836            #self._p.palette(0,["#777777","red"])
1837            for r in xrange(s.nrow()):
1838                xsm=s._getabcissa(r)
1839                ysm=s._getspectrum(r)
1840                xorg=orgscan._getabcissa(r)
1841                yorg=orgscan._getspectrum(r)
1842                self._p.clear()
1843                self._p.hold()
1844                self._p.set_axes('ylabel',ylab)
1845                self._p.set_axes('xlabel',s._getabcissalabel(r))
1846                self._p.set_axes('title',s._getsourcename(r))
1847                self._p.set_line(label='Original',color="#777777")
1848                self._p.plot(xorg,yorg)
1849                self._p.set_line(label='Smoothed',color="red")
1850                self._p.plot(xsm,ysm)
1851                ### Ugly part for legend
1852                for i in [0,1]:
1853                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1854                self._p.release()
1855                ### Ugly part for legend
1856                self._p.subplots[0]['lines']=[]
1857                res = raw_input("Accept smoothing ([y]/n): ")
1858                if res.upper() == 'N':
1859                    s._setspectrum(yorg, r)
1860            self._p.unmap()
1861            self._p = None
1862            del orgscan
1863
1864        if insitu: self._assign(s)
1865        else: return s
1866
1867    @asaplog_post_dec
1868    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1869        """\
1870        Return a scan which has been baselined (all rows) by a polynomial.
1871       
1872        Parameters:
1873
1874            mask:       an optional mask
1875
1876            order:      the order of the polynomial (default is 0)
1877
1878            plot:       plot the fit and the residual. In this mode each
1879                        individual fit has to be approved, by typing 'y'
1880                        or 'n'
1881
1882            uselin:     use linear polynomial fit
1883
1884            insitu:     if False a new scantable is returned.
1885                        Otherwise, the scaling is done in-situ
1886                        The default is taken from .asaprc (False)
1887
1888            rows:       row numbers of spectra to be processed.
1889                        (default is None: for all rows)
1890       
1891        Example::

1892            # return a scan baselined by a third order polynomial,
1893            # not using a mask
1894            bscan = scan.old_poly_baseline(order=3)
1895
1896        """
1897        if insitu is None: insitu = rcParams['insitu']
1898        if not insitu:
1899            workscan = self.copy()
1900        else:
1901            workscan = self
1902        varlist = vars()
1903        if mask is None:
1904            mask = [True for i in xrange(self.nchan())]
1905
1906        try:
1907            f = fitter()
1908            if uselin:
1909                f.set_function(lpoly=order)
1910            else:
1911                f.set_function(poly=order)
1912
1913            if rows is None:
1914                rows = xrange(workscan.nrow())
1915            elif isinstance(rows, int):
1916                rows = [ rows ]
1917           
1918            if len(rows) > 0:
1919                self.blpars = []
1920                self.masklists = []
1921                self.actualmask = []
1922           
1923            for r in rows:
1924                f.x = workscan._getabcissa(r)
1925                f.y = workscan._getspectrum(r)
1926                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1927                f.data = None
1928                f.fit()
1929                if plot:
1930                    f.plot(residual=True)
1931                    x = raw_input("Accept fit ( [y]/n ): ")
1932                    if x.upper() == 'N':
1933                        self.blpars.append(None)
1934                        self.masklists.append(None)
1935                        self.actualmask.append(None)
1936                        continue
1937                workscan._setspectrum(f.fitter.getresidual(), r)
1938                self.blpars.append(f.get_parameters())
1939                self.masklists.append(workscan.get_masklist(f.mask, row=r))
1940                self.actualmask.append(f.mask)
1941
1942            if plot:
1943                f._p.unmap()
1944                f._p = None
1945            workscan._add_history("poly_baseline", varlist)
1946            if insitu:
1947                self._assign(workscan)
1948            else:
1949                return workscan
1950        except RuntimeError:
1951            msg = "The fit failed, possibly because it didn't converge."
1952            raise RuntimeError(msg)
1953
1954
1955    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1956        """\
1957        Return a scan which has been baselined (all rows) by a polynomial.
1958        Parameters:
1959            mask:       an optional mask
1960            order:      the order of the polynomial (default is 0)
1961            plot:       plot the fit and the residual. In this mode each
1962                        individual fit has to be approved, by typing 'y'
1963                        or 'n'. Ignored if batch = True.
1964            batch:      if True a faster algorithm is used and logs
1965                        including the fit results are not output
1966                        (default is False)
1967            insitu:     if False a new scantable is returned.
1968                        Otherwise, the scaling is done in-situ
1969                        The default is taken from .asaprc (False)
1970            rows:       row numbers of spectra to be processed.
1971                        (default is None: for all rows)
1972        Example:
1973            # return a scan baselined by a third order polynomial,
1974            # not using a mask
1975            bscan = scan.poly_baseline(order=3)
1976        """
1977        if insitu is None: insitu = rcParams["insitu"]
1978        if insitu:
1979            workscan = self
1980        else:
1981            workscan = self.copy()
1982
1983        varlist = vars()
1984        nchan = workscan.nchan()
1985       
1986        if mask is None:
1987            mask = [True for i in xrange(nchan)]
1988
1989        try:
1990            if rows is None:
1991                rows = xrange(workscan.nrow())
1992            elif isinstance(rows, int):
1993                rows = [ rows ]
1994           
1995            if len(rows) > 0:
1996                self.blpars = []
1997                self.masklists = []
1998                self.actualmask = []
1999
2000            if batch:
2001                for r in rows:
2002                    workscan._poly_baseline_batch(mask, order, r)
2003            elif plot:
2004                f = fitter()
2005                f.set_function(lpoly=order)
2006                for r in rows:
2007                    f.x = workscan._getabcissa(r)
2008                    f.y = workscan._getspectrum(r)
2009                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2010                    f.data = None
2011                    f.fit()
2012                   
2013                    f.plot(residual=True)
2014                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2015                    if accept_fit.upper() == "N":
2016                        self.blpars.append(None)
2017                        self.masklists.append(None)
2018                        self.actualmask.append(None)
2019                        continue
2020                    workscan._setspectrum(f.fitter.getresidual(), r)
2021                    self.blpars.append(f.get_parameters())
2022                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
2023                    self.actualmask.append(f.mask)
2024                   
2025                f._p.unmap()
2026                f._p = None
2027            else:
2028                import array
2029                for r in rows:
2030                    pars = array.array("f", [0.0 for i in range(order+1)])
2031                    pars_adr = pars.buffer_info()[0]
2032                    pars_len = pars.buffer_info()[1]
2033                   
2034                    errs = array.array("f", [0.0 for i in range(order+1)])
2035                    errs_adr = errs.buffer_info()[0]
2036                    errs_len = errs.buffer_info()[1]
2037                   
2038                    fmsk = array.array("i", [1 for i in range(nchan)])
2039                    fmsk_adr = fmsk.buffer_info()[0]
2040                    fmsk_len = fmsk.buffer_info()[1]
2041                   
2042                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)
2043                   
2044                    params = pars.tolist()
2045                    fmtd = ""
2046                    for i in xrange(len(params)): fmtd += "  p%d= %3.6f," % (i, params[i])
2047                    fmtd = fmtd[:-1]  # remove trailing ","
2048                    errors = errs.tolist()
2049                    fmask = fmsk.tolist()
2050                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0)    # transform (1/0) -> (True/False)
2051                   
2052                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
2053                    self.masklists.append(workscan.get_masklist(fmask, r))
2054                    self.actualmask.append(fmask)
2055                   
2056                    asaplog.push(str(fmtd))
2057           
2058            workscan._add_history("poly_baseline", varlist)
2059           
2060            if insitu:
2061                self._assign(workscan)
2062            else:
2063                return workscan
2064           
2065        except RuntimeError, e:
2066            msg = "The fit failed, possibly because it didn't converge."
2067            if rcParams["verbose"]:
2068                asaplog.push(str(e))
2069                asaplog.push(str(msg))
2070                return
2071            else:
2072                raise RuntimeError(str(e)+'\n'+msg)
2073
2074
2075    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2076                           threshold=3, chan_avg_limit=1, plot=False,
2077                           insitu=None, rows=None):
2078        """\
2079        Return a scan which has been baselined (all rows by default)
2080        by a polynomial.
2081        Spectral lines are detected first using linefinder and masked out
2082        to avoid them affecting the baseline solution.
2083
2084        Parameters:
2085
2086            mask:       an optional mask retrieved from scantable
2087
2088            edge:       an optional number of channels to drop at the edge
2089                        of the spectrum. If only one value is
2090                        specified, the same number will be dropped from
2091                        both sides of the spectrum. Default is to keep
2092                        all channels. Nested tuples represent individual
2093                        edge selection for different IFs (the number of
2094                        spectral channels can differ between IFs)
2095
2096            order:      the order of the polynomial (default is 0)
2097
2098            threshold:  the threshold used by the line finder. It is better
2099                        to keep it large as only strong lines affect the
2100                        baseline solution.
2101
2102            chan_avg_limit:
2103                        a maximum number of consecutive spectral channels to
2104                        average during the search of weak and broad lines.
2105                        The default is no averaging (and no search for weak
2106                        lines). If such lines can affect the fitted baseline
2107                        (e.g. a high order polynomial is fitted), increase this
2108                        parameter (usually values up to 8 are reasonable). Most
2109                        users of this method should find the default value
2110                        sufficient.
2111
2112            plot:       plot the fit and the residual. In this mode each
2113                        individual fit has to be approved, by typing 'y'
2114                        or 'n'
2115
2116            insitu:     if False a new scantable is returned.
2117                        Otherwise, the scaling is done in-situ
2118                        The default is taken from .asaprc (False)
2119            rows:       row numbers of spectra to be processed.
2120                        (default is None: for all rows)
2121
2122
2123        Example::
2124
2125            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2126
2127        """
2128        if insitu is None: insitu = rcParams['insitu']
2129        varlist = vars()
2130        from asap.asaplinefind import linefinder
2131        from asap import _is_sequence_or_number as _is_valid
2132
2133        # check whether edge is set up for each IF individually
2134        individualedge = False
2135        if len(edge) > 1:
2136            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2137                individualedge = True
2138
2139        if not _is_valid(edge, int) and not individualedge:
2140            raise ValueError, "Parameter 'edge' has to be an integer or a " \
2141                  "pair of integers specified as a tuple. Nested tuples are " \
2142                  "allowed to make individual selection for different IFs."
2143
2144        curedge = (0, 0)
2145        if individualedge:
2146            for edgepar in edge:
2147                if not _is_valid(edgepar, int):
2148                    raise ValueError, "Each element of the 'edge' tuple has " \
2149                                      "to be a pair of integers or an integer."
2150        else:
2151            curedge = edge
2152
2153        if not insitu:
2154            workscan = self.copy()
2155        else:
2156            workscan = self
2157
2158        # setup fitter
2159        f = fitter()
2160        f.set_function(lpoly=order)
2161
2162        # setup line finder
2163        fl = linefinder()
2164        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2165
2166        fl.set_scan(workscan)
2167
2168        if mask is None:
2169            mask = _n_bools(workscan.nchan(), True)
2170       
2171        if rows is None:
2172            rows = xrange(workscan.nrow())
2173        elif isinstance(rows, int):
2174            rows = [ rows ]
2175       
2176        # Save parameters of baseline fits & masklists as a class attribute.
2177        # NOTICE: It does not reflect changes in scantable!
2178        if len(rows) > 0:
2179            self.blpars=[]
2180            self.masklists=[]
2181            self.actualmask=[]
2182        asaplog.push("Processing:")
2183        for r in rows:
2184            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2185                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2186                 workscan.getpol(r), workscan.getcycle(r))
2187            asaplog.push(msg, False)
2188
2189            # figure out edge parameter
2190            if individualedge:
2191                if len(edge) <= workscan.getif(r):
2192                    raise RuntimeError, "Number of edge elements appears to " \
2193                                        "be less than the number of IFs"
2194                curedge = edge[workscan.getif(r)]
2195
2196            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2197
2198            # setup line finder
2199            fl.find_lines(r, actualmask, curedge)
2200           
2201            f.x = workscan._getabcissa(r)
2202            f.y = workscan._getspectrum(r)
2203            f.mask = fl.get_mask()
2204            f.data = None
2205            f.fit()
2206
2207            # Show mask list
2208            masklist=workscan.get_masklist(f.mask, row=r)
2209            msg = "mask range: "+str(masklist)
2210            asaplog.push(msg, False)
2211
2212            if plot:
2213                f.plot(residual=True)
2214                x = raw_input("Accept fit ( [y]/n ): ")
2215                if x.upper() == 'N':
2216                    self.blpars.append(None)
2217                    self.masklists.append(None)
2218                    self.actualmask.append(None)
2219                    continue
2220
2221            workscan._setspectrum(f.fitter.getresidual(), r)
2222            self.blpars.append(f.get_parameters())
2223            self.masklists.append(masklist)
2224            self.actualmask.append(f.mask)
2225        if plot:
2226            f._p.unmap()
2227            f._p = None
2228        workscan._add_history("auto_poly_baseline", varlist)
2229        if insitu:
2230            self._assign(workscan)
2231        else:
2232            return workscan
2233
2234    @asaplog_post_dec
2235    def rotate_linpolphase(self, angle):
2236        """\
2237        Rotate the phase of the complex polarization O=Q+iU correlation.
2238        This is always done in situ in the raw data.  So if you call this
2239        function more than once then each call rotates the phase further.
2240
2241        Parameters:
2242
2243            angle:   The angle (degrees) to rotate (add) by.
2244
2245        Example::
2246
2247            scan.rotate_linpolphase(2.3)
2248
2249        """
2250        varlist = vars()
2251        self._math._rotate_linpolphase(self, angle)
2252        self._add_history("rotate_linpolphase", varlist)
2253        return
2254
2255    @asaplog_post_dec
2256    def rotate_xyphase(self, angle):
2257        """\
2258        Rotate the phase of the XY correlation.  This is always done in situ
2259        in the data.  So if you call this function more than once
2260        then each call rotates the phase further.
2261
2262        Parameters:
2263
2264            angle:   The angle (degrees) to rotate (add) by.
2265
2266        Example::
2267
2268            scan.rotate_xyphase(2.3)
2269
2270        """
2271        varlist = vars()
2272        self._math._rotate_xyphase(self, angle)
2273        self._add_history("rotate_xyphase", varlist)
2274        return
2275
2276    @asaplog_post_dec
2277    def swap_linears(self):
2278        """\
2279        Swap the linear polarisations XX and YY, or more generally the first
2280        two polarisations, as this also works for circulars.
2281        """
2282        varlist = vars()
2283        self._math._swap_linears(self)
2284        self._add_history("swap_linears", varlist)
2285        return
2286
2287    @asaplog_post_dec
2288    def invert_phase(self):
2289        """\
2290        Invert the phase of the complex polarisation
2291        """
2292        varlist = vars()
2293        self._math._invert_phase(self)
2294        self._add_history("invert_phase", varlist)
2295        return
2296
2297    @asaplog_post_dec
2298    def add(self, offset, insitu=None):
2299        """\
2300        Return a scan where all spectra have the offset added
2301
2302        Parameters:
2303
2304            offset:      the offset
2305
2306            insitu:      if False a new scantable is returned.
2307                         Otherwise, the scaling is done in-situ
2308                         The default is taken from .asaprc (False)
2309
2310        """
2311        if insitu is None: insitu = rcParams['insitu']
2312        self._math._setinsitu(insitu)
2313        varlist = vars()
2314        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2315        s._add_history("add", varlist)
2316        if insitu:
2317            self._assign(s)
2318        else:
2319            return s
2320
2321    @asaplog_post_dec
2322    def scale(self, factor, tsys=True, insitu=None):
2323        """\
2324
2325        Return a scan where all spectra are scaled by the given 'factor'
2326
2327        Parameters:
2328
2329            factor:      the scaling factor (float or 1D float list)
2330
2331            insitu:      if False a new scantable is returned.
2332                         Otherwise, the scaling is done in-situ
2333                         The default is taken from .asaprc (False)
2334
2335            tsys:        if True (default) then apply the operation to Tsys
2336                         as well as the data
2337
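        Example (an illustrative sketch; 'scan' is an existing scantable and
        the factor is arbitrary)::

            # scale the spectra (and Tsys) by 1.2
            scaled = scan.scale(1.2, tsys=True, insitu=False)
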
2338        """
2339        if insitu is None: insitu = rcParams['insitu']
2340        self._math._setinsitu(insitu)
2341        varlist = vars()
2342        s = None
2343        import numpy
2344        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2345            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2346                from asapmath import _array2dOp
2347                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2348            else:
2349                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2350        else:
2351            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2352        s._add_history("scale", varlist)
2353        if insitu:
2354            self._assign(s)
2355        else:
2356            return s
2357
2358    def set_sourcetype(self, match, matchtype="pattern",
2359                       sourcetype="reference"):
2360        """\
2361        Set the type of the source to be a source or reference scan
2362        using the provided pattern.
2363
2364        Parameters:
2365
2366            match:          a Unix style pattern, regular expression or selector
2367
2368            matchtype:      'pattern' (default) UNIX style pattern or
2369                            'regex' regular expression
2370
2371            sourcetype:     the type of the source to use (source/reference)
2372
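        Example (an illustrative sketch; 'scan' is an existing scantable and
        the source-name pattern is arbitrary)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
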
2373        """
2374        varlist = vars()
2375        basesel = self.get_selection()
2376        stype = -1
2377        if sourcetype.lower().startswith("r"):
2378            stype = 1
2379        elif sourcetype.lower().startswith("s"):
2380            stype = 0
2381        else:
2382            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2383        if matchtype.lower().startswith("p"):
2384            matchtype = "pattern"
2385        elif matchtype.lower().startswith("r"):
2386            matchtype = "regex"
2387        else:
2388            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2389        sel = selector()
2390        if isinstance(match, selector):
2391            sel = match
2392        else:
2393            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2394        self.set_selection(basesel+sel)
2395        self._setsourcetype(stype)
2396        self.set_selection(basesel)
2397        self._add_history("set_sourcetype", varlist)
2398
2399    @asaplog_post_dec
2400    @preserve_selection
2401    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2402        """\
2403        This function allows you to build quotients automatically.
2404        It assumes the observation has the same number of
2405        "ons" and "offs".
2406
2407        Parameters:
2408
2409            preserve:       you can preserve (default) the continuum or
2410                            remove it.  The equations used are
2411
2412                            preserve: Output = Toff * (on/off) - Toff
2413
2414                            remove:   Output = Toff * (on/off) - Ton
2415
2416            mode:           the on/off detection mode
2417                            'paired' (default)
2418                            identifies 'off' scans by the
2419                            trailing '_R' (Mopra/Parkes) or
2420                            '_e'/'_w' (Tid) and matches
2421                            on/off pairs from the observing pattern
2422                            'time'
2423                            finds the closest off in time
2424
2425        .. todo:: verify argument is not implemented
2426
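        Example (an illustrative sketch; 'scan' is assumed to contain
        matching on/off scans)::

            # build quotients from on/off pairs, preserving the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
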
2427        """
2428        varlist = vars()
2429        modes = ["time", "paired"]
2430        if not mode in modes:
2431            msg = "please provide valid mode. Valid modes are %s" % (modes)
2432            raise ValueError(msg)
2433        s = None
2434        if mode.lower() == "paired":
2435            sel = self.get_selection()
2436            sel.set_query("SRCTYPE==psoff")
2437            self.set_selection(sel)
2438            offs = self.copy()
2439            sel.set_query("SRCTYPE==pson")
2440            self.set_selection(sel)
2441            ons = self.copy()
2442            s = scantable(self._math._quotient(ons, offs, preserve))
2443        elif mode.lower() == "time":
2444            s = scantable(self._math._auto_quotient(self, mode, preserve))
2445        s._add_history("auto_quotient", varlist)
2446        return s
2447
2448    @asaplog_post_dec
2449    def mx_quotient(self, mask = None, weight='median', preserve=True):
2450        """\
2451        Form a quotient using "off" beams when observing in "MX" mode.
2452
2453        Parameters:
2454
2455            mask:           an optional mask to be used when weight == 'stddev'
2456
2457            weight:         How to average the off beams.  Default is 'median'.
2458
2459            preserve:       you can preserve (default) the continuum or
2460                            remove it.  The equations used are:
2461
2462                                preserve: Output = Toff * (on/off) - Toff
2463
2464                                remove:   Output = Toff * (on/off) - Ton
2465
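        Example (an illustrative sketch; 'scan' is assumed to hold an
        MX-mode observation)::

            # form the quotient using the median of the off beams
            q = scan.mx_quotient(weight='median', preserve=True)
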
2466        """
2467        mask = mask or ()
2468        varlist = vars()
2469        on = scantable(self._math._mx_extract(self, 'on'))
2470        preoff = scantable(self._math._mx_extract(self, 'off'))
2471        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2472        from asapmath  import quotient
2473        q = quotient(on, off, preserve)
2474        q._add_history("mx_quotient", varlist)
2475        return q
2476
2477    @asaplog_post_dec
2478    def freq_switch(self, insitu=None):
2479        """\
2480        Apply frequency switching to the data.
2481
2482        Parameters:
2483
2484            insitu:      if False a new scantable is returned.
2485                         Otherwise, the switching is done in-situ
2486                         The default is taken from .asaprc (False)
2487
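        Example (an illustrative sketch; 'scan' is assumed to hold
        frequency-switched data)::

            fsscan = scan.freq_switch(insitu=False)
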
2488        """
2489        if insitu is None: insitu = rcParams['insitu']
2490        self._math._setinsitu(insitu)
2491        varlist = vars()
2492        s = scantable(self._math._freqswitch(self))
2493        s._add_history("freq_switch", varlist)
2494        if insitu:
2495            self._assign(s)
2496        else:
2497            return s
2498
2499    @asaplog_post_dec
2500    def recalc_azel(self):
2501        """Recalculate the azimuth and elevation for each position."""
2502        varlist = vars()
2503        self._recalcazel()
2504        self._add_history("recalc_azel", varlist)
2505        return
2506
2507    @asaplog_post_dec
2508    def __add__(self, other):
2509        varlist = vars()
2510        s = None
2511        if isinstance(other, scantable):
2512            s = scantable(self._math._binaryop(self, other, "ADD"))
2513        elif isinstance(other, float):
2514            s = scantable(self._math._unaryop(self, other, "ADD", False))
2515        else:
2516            raise TypeError("Other input is not a scantable or float value")
2517        s._add_history("operator +", varlist)
2518        return s
2519
2520    @asaplog_post_dec
2521    def __sub__(self, other):
2522        """
2523        implicit on all axes and on Tsys
2524        """
2525        varlist = vars()
2526        s = None
2527        if isinstance(other, scantable):
2528            s = scantable(self._math._binaryop(self, other, "SUB"))
2529        elif isinstance(other, float):
2530            s = scantable(self._math._unaryop(self, other, "SUB", False))
2531        else:
2532            raise TypeError("Other input is not a scantable or float value")
2533        s._add_history("operator -", varlist)
2534        return s
2535
2536    @asaplog_post_dec
2537    def __mul__(self, other):
2538        """
2539        implicit on all axes and on Tsys
2540        """
2541        varlist = vars()
2542        s = None
2543        if isinstance(other, scantable):
2544            s = scantable(self._math._binaryop(self, other, "MUL"))
2545        elif isinstance(other, float):
2546            s = scantable(self._math._unaryop(self, other, "MUL", False))
2547        else:
2548            raise TypeError("Other input is not a scantable or float value")
2549        s._add_history("operator *", varlist)
2550        return s
2551
2552
2553    @asaplog_post_dec
2554    def __div__(self, other):
2555        """
2556        implicit on all axes and on Tsys
2557        """
2558        varlist = vars()
2559        s = None
2560        if isinstance(other, scantable):
2561            s = scantable(self._math._binaryop(self, other, "DIV"))
2562        elif isinstance(other, float):
2563            if other == 0.0:
2564                raise ZeroDivisionError("Dividing by zero is not recommended")
2565            s = scantable(self._math._unaryop(self, other, "DIV", False))
2566        else:
2567            raise TypeError("Other input is not a scantable or float value")
2568        s._add_history("operator /", varlist)
2569        return s
2570
2571    @asaplog_post_dec
2572    def get_fit(self, row=0):
2573        """\
2574        Print or return the stored fits for a row in the scantable
2575
2576        Parameters:
2577
2578            row:    the row which the fit has been applied to.
2579
2580        """
2581        if row >= self.nrow():
2582            return
2583        from asap.asapfit import asapfit
2584        fit = asapfit(self._getfit(row))
2585        asaplog.push( '%s' %(fit) )
2586        return fit.as_dict()
2587
2588    def flag_nans(self):
2589        """\
2590        Utility function to flag NaN values in the scantable.
2591        """
2592        import numpy
2593        basesel = self.get_selection()
2594        for i in range(self.nrow()):
2595            sel = self.get_row_selector(i)
2596            self.set_selection(basesel+sel)
2597            nans = numpy.isnan(self._getspectrum(0))
2598            if numpy.any(nans):
2599                bnans = [ bool(v) for v in nans]
2600                self.flag(bnans)
2601        self.set_selection(basesel)
2602
2603    def get_row_selector(self, rowno):
2604        return selector(beams=self.getbeam(rowno),
2605                        ifs=self.getif(rowno),
2606                        pols=self.getpol(rowno),
2607                        scans=self.getscan(rowno),
2608                        cycles=self.getcycle(rowno))
2609
2610    def _add_history(self, funcname, parameters):
2611        if not rcParams['scantable.history']:
2612            return
2613        # create date
2614        sep = "##"
2615        from datetime import datetime
2616        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2617        hist = dstr+sep
2618        hist += funcname+sep#cdate+sep
2619        if parameters.has_key('self'): del parameters['self']
2620        for k, v in parameters.iteritems():
2621            if type(v) is dict:
2622                for k2, v2 in v.iteritems():
2623                    hist += k2
2624                    hist += "="
2625                    if isinstance(v2, scantable):
2626                        hist += 'scantable'
2627                    elif k2 == 'mask':
2628                        if isinstance(v2, list) or isinstance(v2, tuple):
2629                            hist += str(self._zip_mask(v2))
2630                        else:
2631                            hist += str(v2)
2632                    else:
2633                        hist += str(v2)
2634            else:
2635                hist += k
2636                hist += "="
2637                if isinstance(v, scantable):
2638                    hist += 'scantable'
2639                elif k == 'mask':
2640                    if isinstance(v, list) or isinstance(v, tuple):
2641                        hist += str(self._zip_mask(v))
2642                    else:
2643                        hist += str(v)
2644                else:
2645                    hist += str(v)
2646            hist += sep
2647        hist = hist[:-2] # remove trailing '##'
2648        self._addhistory(hist)
2649
2650
2651    def _zip_mask(self, mask):
2652        mask = list(mask)
2653        i = 0
2654        segments = []
2655        while mask[i:].count(1):
2656            i += mask[i:].index(1)
2657            if mask[i:].count(0):
2658                j = i + mask[i:].index(0)
2659            else:
2660                j = len(mask)
2661            segments.append([i, j])
2662            i = j
2663        return segments
2664
2665    def _get_ordinate_label(self):
2666        fu = "("+self.get_fluxunit()+")"
2667        import re
2668        lbl = "Intensity"
2669        if re.match(".K.", fu):
2670            lbl = "Brightness Temperature "+ fu
2671        elif re.match(".Jy.", fu):
2672            lbl = "Flux density "+ fu
2673        return lbl
2674
2675    def _check_ifs(self):
2676        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2677        nchans = filter(lambda t: t > 0, nchans)
2678        return (sum(nchans)/len(nchans) == nchans[0])
2679
2680    @asaplog_post_dec
2681    #def _fill(self, names, unit, average, getpt, antenna):
2682    def _fill(self, names, unit, average, opts={}):
2683        first = True
2684        fullnames = []
2685        for name in names:
2686            name = os.path.expandvars(name)
2687            name = os.path.expanduser(name)
2688            if not os.path.exists(name):
2689                msg = "File '%s' does not exist" % (name)
2690                raise IOError(msg)
2691            fullnames.append(name)
2692        if average:
2693            asaplog.push('Auto averaging integrations')
2694        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2695        for name in fullnames:
2696            tbl = Scantable(stype)
2697            r = filler(tbl)
2698            rx = rcParams['scantable.reference']
2699            r.setreferenceexpr(rx)
2700            msg = "Importing %s..." % (name)
2701            asaplog.push(msg, False)
2702            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2703            r.open(name, opts)# antenna, -1, -1, getpt)
2704            r.fill()
2705            if average:
2706                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2707            if not first:
2708                tbl = self._math._merge([self, tbl])
2709            Scantable.__init__(self, tbl)
2710            r.close()
2711            del r, tbl
2712            first = False
2713            #flush log
2714        asaplog.post()
2715        if unit is not None:
2716            self.set_fluxunit(unit)
2717        if not is_casapy():
2718            self.set_freqframe(rcParams['scantable.freqframe'])
2719
2720    def __getitem__(self, key):
2721        if key < 0:
2722            key += self.nrow()
2723        if key >= self.nrow():
2724            raise IndexError("Row index out of range.")
2725        return self._getspectrum(key)
2726
2727    def __setitem__(self, key, value):
2728        if key < 0:
2729            key += self.nrow()
2730        if key >= self.nrow():
2731            raise IndexError("Row index out of range.")
2732        if not hasattr(value, "__len__") or \
2733                len(value) > self.nchan(self.getif(key)):
2734            raise ValueError("Spectrum length doesn't match.")
2735        return self._setspectrum(value, key)
2736
2737    def __len__(self):
2738        return self.nrow()
2739
2740    def __iter__(self):
2741        for i in range(len(self)):
2742            yield self[i]