source: trunk/python/scantable.py @ 1883

Last change on this file since 1883 was 1883, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

Test Programs: read MS in reference table

Description:

Bug fix in is_scantable() and similar routines in the code.
They now recognize reference tables correctly.

splitant() now works with reference tables, so performance
improves slightly because a deep copy is no longer necessary.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 86.6 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18
19
20def preserve_selection(func):
21    @wraps_dec(func)
22    def wrap(obj, *args, **kw):
23        basesel = obj.get_selection()
24        try:
25            val = func(obj, *args, **kw)
26        finally:
27            obj.set_selection(basesel)
28        return val
29    return wrap
30
31def is_scantable(filename):
32    """Is the given file a scantable?
33
34    Parameters:
35
36        filename: the name of the file/directory to test
37
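    A minimal check sketch (the file name 'data.asap' is only an
    illustration)::

        if is_scantable('data.asap'):
            print 'looks like a scantable'
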
38    """
39    if ( os.path.isdir(filename)
40         and os.path.exists(filename+'/table.info')
41         and os.path.exists(filename+'/table.dat') ):
42        f=open(filename+'/table.info')
43        l=f.readline()
44        f.close()
45        #if ( l.find('Scantable') != -1 ):
46        if ( l.find('Measurement Set') == -1 ):
47            return True
48        else:
49            return False
50    else:
51        return False
52##     return (os.path.isdir(filename)
53##             and not os.path.exists(filename+'/table.f1')
54##             and os.path.exists(filename+'/table.info'))
55
56def is_ms(filename):
57    """Is the given file a MeasurementSet?
58
59    Parameters:
60
61        filename: the name of the file/directory to test
62
63    """
64    if ( os.path.isdir(filename)
65         and os.path.exists(filename+'/table.info')
66         and os.path.exists(filename+'/table.dat') ):
67        f=open(filename+'/table.info')
68        l=f.readline()
69        f.close()
70        if ( l.find('Measurement Set') != -1 ):
71            return True
72        else:
73            return False
74    else:
75        return False
76   
77class scantable(Scantable):
78    """\
79        The ASAP container for scans (single-dish data).
80    """
81
82    @asaplog_post_dec
83    def __init__(self, filename, average=None, unit=None, getpt=None,
84                 antenna=None, parallactify=None):
85        """\
86        Create a scantable from a saved one or make a reference
87
88        Parameters:
89
90            filename:     the name of an asap table on disk
91                          or
92                          the name of a rpfits/sdfits/ms file
93                          (integrations within scans are auto averaged
94                          and the whole file is read) or
95                          [advanced] a reference to an existing scantable
96
97            average:      average all integrations within a scan on read.
98                          The default (True) is taken from .asaprc.
99
100            unit:         brightness unit; must be consistent with K or Jy.
101                          Over-rides the default selected by the filler
102                          (input rpfits/sdfits/ms) or replaces the value
103                          in existing scantables
104
105            getpt:        for MeasurementSet input data only:
106                          If True, all pointing data are filled.
107                          The default is True. Setting this to False can
108                          make loading the MS data faster in some cases.
109
110            antenna:      Antenna selection. integer (id) or string (name or id).
111
112            parallactify: Indicate whether the data has been parallactified.
113                          The default is taken from the .asaprc file.
114
115        """
116        if average is None:
117            average = rcParams['scantable.autoaverage']
118        if getpt is None:
119            getpt = True
120        if antenna is not None:
121            asaplog.push("Antenna selection currently unsupported. "
122                         "Using ''")
123            asaplog.post('WARN')
124        if antenna is None:
125            antenna = ''
126        elif type(antenna) == int:
127            antenna = '%s' % antenna
128        elif type(antenna) == list:
129            tmpstr = ''
130            for i in range( len(antenna) ):
131                if type(antenna[i]) == int:
132                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
133                elif type(antenna[i]) == str:
134                    tmpstr=tmpstr+antenna[i]+','
135                else:
136                    raise TypeError('Bad antenna selection.')
137            antenna = tmpstr.rstrip(',')
138        parallactify = parallactify or rcParams['scantable.parallactify']
139        varlist = vars()
140        from asap._asap import stmath
141        self._math = stmath( rcParams['insitu'] )
142        if isinstance(filename, Scantable):
143            Scantable.__init__(self, filename)
144        else:
145            if isinstance(filename, str):
146                filename = os.path.expandvars(filename)
147                filename = os.path.expanduser(filename)
148                if not os.path.exists(filename):
149                    s = "File '%s' not found." % (filename)
150                    raise IOError(s)
151                if is_scantable(filename):
152                    ondisk = rcParams['scantable.storage'] == 'disk'
153                    Scantable.__init__(self, filename, ondisk)
154                    if unit is not None:
155                        self.set_fluxunit(unit)
156                    # do not reset to the default freqframe
157                    #self.set_freqframe(rcParams['scantable.freqframe'])
158                #elif os.path.isdir(filename) \
159                #         and not os.path.exists(filename+'/table.f1'):
160                elif is_ms(filename):
161                    self._fill([filename], unit, average, getpt, antenna)
162                else:
163                    msg = "The given file '%s' is not a valid " \
164                          "asap table." % (filename)
165                    raise IOError(msg)
166            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
167                  and isinstance(filename[-1], str):
168                self._fill(filename, unit, average, getpt, antenna)
169        self.parallactify(parallactify)
170        self._add_history("scantable", varlist)
171
172    @asaplog_post_dec
173    def save(self, name=None, format=None, overwrite=False):
174        """\
175        Store the scantable on disk. This can be an asap (aips++) Table,
176        SDFITS or MS2 format.
177
178        Parameters:
179
180            name:        the name of the outputfile. For format "ASCII"
181                         this is the root file name (data in 'name'.txt
182                         and header in 'name'_header.txt)
183
184            format:      an optional file format. Default is ASAP.
185                         Allowed are:
186
187                            * 'ASAP' (save as ASAP [aips++] Table),
188                            * 'SDFITS' (save as SDFITS file)
189                            * 'ASCII' (saves as ascii text file)
190                            * 'MS2' (saves as a casacore MeasurementSet V2)
191                            * 'FITS' (save as image FITS - not readable by class)
192                            * 'CLASS' (save as FITS readable by CLASS)
193
194            overwrite:   If the file should be overwritten if it exists.
195                         The default False is to return with warning
196                         without writing the output. USE WITH CARE.
197
198        Example::
199
200            scan.save('myscan.asap')
201            scan.save('myscan.sdfits', 'SDFITS')
202
203        """
204        from os import path
205        format = format or rcParams['scantable.save']
206        suffix = '.'+format.lower()
207        if name is None or name == "":
208            name = 'scantable'+suffix
209            msg = "No filename given. Using default name %s..." % name
210            asaplog.push(msg)
211        name = path.expandvars(name)
212        if path.isfile(name) or path.isdir(name):
213            if not overwrite:
214                msg = "File %s exists." % name
215                raise IOError(msg)
216        format2 = format.upper()
217        if format2 == 'ASAP':
218            self._save(name)
219        else:
220            from asap._asap import stwriter as stw
221            writer = stw(format2)
222            writer.write(self, name)
223        return
224
225    def copy(self):
226        """Return a copy of this scantable.
227
228        *Note*:
229
230            This makes a full (deep) copy. scan2 = scan1 makes a reference.
231
232        Example::
233
234            copiedscan = scan.copy()
235
236        """
237        sd = scantable(Scantable._copy(self))
238        return sd
239
240    def drop_scan(self, scanid=None):
241        """\
242        Return a new scantable where the specified scan number(s) has(have)
243        been dropped.
244
245        Parameters:
246
247            scanid:    a (list of) scan number(s)
248
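        A usage sketch (scan numbers below are illustrative; ``scan`` is an
        already loaded scantable)::

            trimmed = scan.drop_scan(2)        # drop scan number 2
            trimmed = scan.drop_scan([0, 3])   # or several scans at once
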
249        """
250        from asap import _is_sequence_or_number as _is_valid
251        from asap import _to_list
252        from asap import unique
253        if not _is_valid(scanid):
254            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
255        scanid = _to_list(scanid)
256        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
257        for sid in scanid: allscans.remove(sid)
258        if len(allscans) == 0:
259            raise ValueError("Can't remove all scans")
260        sel = selector(scans=allscans)
261        return self._select_copy(sel)
262
263    def _select_copy(self, selection):
264        orig = self.get_selection()
265        self.set_selection(orig+selection)
266        cp = self.copy()
267        self.set_selection(orig)
268        return cp
269
270    def get_scan(self, scanid=None):
271        """\
272        Return a specific scan (by scanno) or collection of scans (by
273        source name) in a new scantable.
274
275        *Note*:
276
277            See scantable.drop_scan() for the inverse operation.
278
279        Parameters:
280
281            scanid:    a (list of) scanno or a source name, unix-style
282                       patterns are accepted for source name matching, e.g.
283                       '*_R' gets all 'ref' scans
284
285        Example::
286
287            # get all scans containing the source '323p459'
288            newscan = scan.get_scan('323p459')
289            # get all 'off' scans
290            refscans = scan.get_scan('*_R')
291            # get a subset of scans by scanno (as listed in scan.summary())
292            newscan = scan.get_scan([0, 2, 7, 10])
293
294        """
295        if scanid is None:
296            raise RuntimeError( 'Please specify a scan no or name to '
297                                'retrieve from the scantable' )
298        try:
299            bsel = self.get_selection()
300            sel = selector()
301            if type(scanid) is str:
302                sel.set_name(scanid)
303                return self._select_copy(sel)
304            elif type(scanid) is int:
305                sel.set_scans([scanid])
306                return self._select_copy(sel)
307            elif type(scanid) is list:
308                sel.set_scans(scanid)
309                return self._select_copy(sel)
310            else:
311                msg = "Illegal scanid type, use 'int' or 'list' of ints."
312                raise TypeError(msg)
313        except RuntimeError:
314            raise
315
316    def __str__(self):
317        return Scantable._summary(self, True)
318
319    def summary(self, filename=None):
320        """\
321        Print a summary of the contents of this scantable.
322
323        Parameters:
324
325            filename:    the name of a file to write the output to
326                         Default - no file output
327
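        A usage sketch (the output file name is only an illustration)::

            scan.summary()                      # page to screen
            scan.summary('obs_summary.txt')     # also write to a text file
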
328        """
329        info = Scantable._summary(self, True)
330        if filename is not None:
331            if filename == "":
332                filename = 'scantable_summary.txt'
333            from os.path import expandvars, isdir
334            filename = expandvars(filename)
335            if not isdir(filename):
336                data = open(filename, 'w')
337                data.write(info)
338                data.close()
339            else:
340                msg = "Illegal file name '%s'." % (filename)
341                raise IOError(msg)
342        return page(info)
343
344    def get_spectrum(self, rowno):
345        """Return the spectrum for the current row in the scantable as a list.
346
347        Parameters:
348
349             rowno:   the row number to retrieve the spectrum from
350
351        """
352        return self._getspectrum(rowno)
353
354    def get_mask(self, rowno):
355        """Return the mask for the current row in the scantable as a list.
356
357        Parameters:
358
359             rowno:   the row number to retrieve the mask from
360
361        """
362        return self._getmask(rowno)
363
364    def set_spectrum(self, spec, rowno):
365        """Set the spectrum for the given row in the scantable.
366
367        Parameters:
368
369             spec:   the new spectrum
370
371             rowno:  the row number to set the spectrum for
372
373        """
374        assert(len(spec) == self.nchan())
375        return self._setspectrum(spec, rowno)
376
377    def get_coordinate(self, rowno):
378        """Return the (spectral) coordinate for a given 'rowno'.
379
380        *Note*:
381
382            * This coordinate is only valid until a scantable method modifies
383              the frequency axis.
384            * This coordinate contains the original frequency set-up,
385              NOT the new frame. The conversions, however, are done using the
386              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
387              use scantable.freq_align first. Without it there is no closure,
388              i.e.::
389
390                  c = myscan.get_coordinate(0)
391                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
392
393        Parameters:
394
395             rowno:    the row number for the spectral coordinate
396
397        """
398        return coordinate(Scantable.get_coordinate(self, rowno))
399
400    def get_selection(self):
401        """\
402        Get the selection object currently set on this scantable.
403
404        Example::
405
406            sel = scan.get_selection()
407            sel.set_ifs(0)              # select IF 0
408            scan.set_selection(sel)     # apply modified selection
409
410        """
411        return selector(self._getselection())
412
413    def set_selection(self, selection=None, **kw):
414        """\
415        Select a subset of the data. All following operations on this scantable
416        are only applied to this selection.
417
418        Parameters:
419
420            selection:    a selector object (default unset the selection), or
421                          any combination of "pols", "ifs", "beams", "scans",
422                          "cycles", "name", "query"
423
424        Examples::
425
426            sel = selector()         # create a selection object
427            sel.set_scans([0, 3])     # select SCANNO 0 and 3
428            scan.set_selection(sel)  # set the selection
429            scan.summary()           # will only print summary of scanno 0 and 3
430            scan.set_selection()     # unset the selection
431            # or the equivalent
432            scan.set_selection(scans=[0,3])
433            scan.summary()           # will only print summary of scanno 0 and 3
434            scan.set_selection()     # unset the selection
435
436        """
437        if selection is None:
438            # reset
439            if len(kw) == 0:
440                selection = selector()
441            else:
442                # try keywords
443                for k in kw:
444                    if k not in selector.fields:
445                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
446                selection = selector(**kw)
447        self._setselection(selection)
448
449    def get_row(self, row=0, insitu=None):
450        """\
451        Select a row in the scantable.
452        Return a scantable with single row.
453
454        Parameters:
455
456            row:    row no of integration, default is 0.
457            insitu: if False a new scantable is returned. Otherwise, the
458                    scaling is done in-situ. The default is taken from .asaprc
459                    (False)
460
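        A usage sketch (assumes the scantable has at least four rows)::

            row3 = scan.get_row(3, insitu=False)   # new single-row scantable
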
461        """
462        if insitu is None: insitu = rcParams['insitu']
463        if not insitu:
464            workscan = self.copy()
465        else:
466            workscan = self
467        # Select a row
468        sel=selector()
469        sel.set_scans([workscan.getscan(row)])
470        sel.set_cycles([workscan.getcycle(row)])
471        sel.set_beams([workscan.getbeam(row)])
472        sel.set_ifs([workscan.getif(row)])
473        sel.set_polarisations([workscan.getpol(row)])
474        sel.set_name(workscan._getsourcename(row))
475        workscan.set_selection(sel)
476        if not workscan.nrow() == 1:
477            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
478            raise RuntimeError(msg)
479        del sel
480        if insitu:
481            self._assign(workscan)
482        else:
483            return workscan
484
485    @asaplog_post_dec
486    def stats(self, stat='stddev', mask=None, form='3.3f'):
487        """\
488        Determine the specified statistic of the current beam/if/pol
489        Takes a 'mask' as an optional parameter to specify which
490        channels should be excluded.
491
492        Parameters:
493
494            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
495                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
496
497            mask:    an optional mask specifying where the statistic
498                     should be determined.
499
500            form:    format string to print statistic values
501
502        Example::
503
504            scan.set_unit('channel')
505            msk = scan.create_mask([100, 200], [500, 600])
506            scan.stats(stat='mean', mask=msk)
507
508        """
509        mask = mask or []
510        if not self._check_ifs():
511            raise ValueError("Cannot apply mask as the IFs have different "
512                             "number of channels. Please use set_selection() "
513                             "to select individual IFs")
514        rtnabc = False
515        if stat.lower().endswith('_abc'): rtnabc = True
516        getchan = False
517        if stat.lower().startswith('min') or stat.lower().startswith('max'):
518            chan = self._math._minmaxchan(self, mask, stat)
519            getchan = True
520            statvals = []
521        if not rtnabc: statvals = self._math._stats(self, mask, stat)
522
523        #def cb(i):
524        #    return statvals[i]
525
526        #return self._row_callback(cb, stat)
527
528        label=stat
529        #callback=cb
530        out = ""
531        #outvec = []
532        sep = '-'*50
533        for i in range(self.nrow()):
534            refstr = ''
535            statunit= ''
536            if getchan:
537                qx, qy = self.chan2data(rowno=i, chan=chan[i])
538                if rtnabc:
539                    statvals.append(qx['value'])
540                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
541                    statunit= '['+qx['unit']+']'
542                else:
543                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
544
545            tm = self._gettime(i)
546            src = self._getsourcename(i)
547            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
548            out += 'Time[%s]:\n' % (tm)
549            if self.nbeam(-1) > 1:
550                out +=  ' Beam[%d] ' % (self.getbeam(i))
551            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
552            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
553            #outvec.append(callback(i))
554            #out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
555            out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
556            out +=  sep+"\n"
557
558
559        import os
560        if os.environ.has_key( 'USER' ):
561            usr = os.environ['USER']
562        else:
563            import commands
564            usr = commands.getoutput( 'whoami' )
565        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
566        f = open(tmpfile,'w')
567        print >> f, sep
568        print >> f, ' %s %s' % (label, statunit)
569        print >> f, sep
570        print >> f, out
571        f.close()
572        f = open(tmpfile,'r')
573        x = f.readlines()
574        f.close()
575        asaplog.push(''.join(x), False)
576
577        return statvals
578
579    def chan2data(self, rowno=0, chan=0):
580        """\
581        Returns channel/frequency/velocity and spectral value
582        at an arbitrary row and channel in the scantable.
583
584        Parameters:
585
586            rowno:   a row number in the scantable. Default is the
587                     first row, i.e. rowno=0
588
589            chan:    a channel in the scantable. Default is the first
590                     channel, i.e. chan=0
591
592        """
593        if isinstance(rowno, int) and isinstance(chan, int):
594            qx = {'unit': self.get_unit(),
595                  'value': self._getabcissa(rowno)[chan]}
596            qy = {'unit': self.get_fluxunit(),
597                  'value': self._getspectrum(rowno)[chan]}
598            return qx, qy
599
600    def stddev(self, mask=None):
601        """\
602        Determine the standard deviation of the current beam/if/pol
603        Takes a 'mask' as an optional parameter to specify which
604        channels should be excluded.
605
606        Parameters:
607
608            mask:    an optional mask specifying where the standard
609                     deviation should be determined.
610
611        Example::
612
613            scan.set_unit('channel')
614            msk = scan.create_mask([100, 200], [500, 600])
615            scan.stddev(mask=msk)
616
617        """
618        return self.stats(stat='stddev', mask=mask)
619
620
621    def get_column_names(self):
622        """\
623        Return a  list of column names, which can be used for selection.
624        """
625        return list(Scantable.get_column_names(self))
626
627    def get_tsys(self, row=-1):
628        """\
629        Return the System temperatures.
630
631        Parameters:
632
633            row:    the rowno to get the information for. (default all rows)
634
635        Returns:
636
637            a list of Tsys values for the current selection
638
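        A usage sketch::

            tsys_all = scan.get_tsys()     # one value per row
            tsys0 = scan.get_tsys(0)       # Tsys of row 0
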
639        """
640        if row > -1:
641            return self._get_column(self._gettsys, row)
642        return self._row_callback(self._gettsys, "Tsys")
643
644
645    def get_weather(self, row=-1):
646        """\
647        Return the weather information.
648
649        Parameters:
650
651            row:    the rowno to get the information for. (default all rows)
652
653        Returns:
654
655            a dict or list of dicts of values for the current selection
656
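        A usage sketch (dictionary keys as returned by this method)::

            w = scan.get_weather(0)
            print w['temperature'], w['pressure'], w['humidity']
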
657        """
658
659        values = self._get_column(self._get_weather, row)
660        if row > -1:
661            return {'temperature': values[0],
662                    'pressure': values[1], 'humidity' : values[2],
663                    'windspeed' : values[3], 'windaz' : values[4]
664                    }
665        else:
666            out = []
667            for r in values:
668
669                out.append({'temperature': r[0],
670                            'pressure': r[1], 'humidity' : r[2],
671                            'windspeed' : r[3], 'windaz' : r[4]
672                    })
673            return out
674
675    def _row_callback(self, callback, label):
676        out = ""
677        outvec = []
678        sep = '-'*50
679        for i in range(self.nrow()):
680            tm = self._gettime(i)
681            src = self._getsourcename(i)
682            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
683            out += 'Time[%s]:\n' % (tm)
684            if self.nbeam(-1) > 1:
685                out +=  ' Beam[%d] ' % (self.getbeam(i))
686            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
687            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
688            outvec.append(callback(i))
689            out += '= %3.3f\n' % (outvec[i])
690            out +=  sep+'\n'
691
692        asaplog.push(sep)
693        asaplog.push(" %s" % (label))
694        asaplog.push(sep)
695        asaplog.push(out)
696        asaplog.post()
697        return outvec
698
699    def _get_column(self, callback, row=-1):
700        """
701        """
702        if row == -1:
703            return [callback(i) for i in range(self.nrow())]
704        else:
705            if  0 <= row < self.nrow():
706                return callback(row)
707
708
709    def get_time(self, row=-1, asdatetime=False):
710        """\
711        Get a list of time stamps for the observations.
712        Return a datetime object for each integration time stamp in the scantable.
713
714        Parameters:
715
716            row:          row no of integration. Default -1 return all rows
717
718            asdatetime:   return values as datetime objects rather than strings
719
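        A usage sketch::

            stamps = scan.get_time(asdatetime=True)   # list of datetime objects
            first = scan.get_time(0)                   # time string of row 0
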
720        """
721        from time import strptime
722        from datetime import datetime
723        times = self._get_column(self._gettime, row)
724        if not asdatetime:
725            return times
726        format = "%Y/%m/%d/%H:%M:%S"
727        if isinstance(times, list):
728            return [datetime(*strptime(i, format)[:6]) for i in times]
729        else:
730            return datetime(*strptime(times, format)[:6])
731
732
733    def get_inttime(self, row=-1):
734        """\
735        Get a list of integration times for the observations.
736        Return a time in seconds for each integration in the scantable.
737
738        Parameters:
739
740            row:    row no of integration. Default -1 return all rows.
741
742        """
743        return self._get_column(self._getinttime, row)
744
745
746    def get_sourcename(self, row=-1):
747        """\
748        Get a list of source names for the observations.
749        Return a string for each integration in the scantable.
750        Parameters:
751
752            row:    row no of integration. Default -1 return all rows.
753
754        """
755        return self._get_column(self._getsourcename, row)
756
757    def get_elevation(self, row=-1):
758        """\
759        Get a list of elevations for the observations.
760        Return a float for each integration in the scantable.
761
762        Parameters:
763
764            row:    row no of integration. Default -1 return all rows.
765
766        """
767        return self._get_column(self._getelevation, row)
768
769    def get_azimuth(self, row=-1):
770        """\
771        Get a list of azimuths for the observations.
772        Return a float for each integration in the scantable.
773
774        Parameters:
775            row:    row no of integration. Default -1 return all rows.
776
777        """
778        return self._get_column(self._getazimuth, row)
779
780    def get_parangle(self, row=-1):
781        """\
782        Get a list of parallactic angles for the observations.
783        Return a float for each integration in the scantable.
784
785        Parameters:
786
787            row:    row no of integration. Default -1 return all rows.
788
789        """
790        return self._get_column(self._getparangle, row)
791
792    def get_direction(self, row=-1):
793        """
794        Get a list of Positions on the sky (direction) for the observations.
795        Return a string for each integration in the scantable.
796
797        Parameters:
798
799            row:    row no of integration. Default -1 return all rows
800
801        """
802        return self._get_column(self._getdirection, row)
803
804    def get_directionval(self, row=-1):
805        """\
806        Get a list of Positions on the sky (direction) for the observations.
807        Return a float for each integration in the scantable.
808
809        Parameters:
810
811            row:    row no of integration. Default -1 return all rows
812
813        """
814        return self._get_column(self._getdirectionvec, row)
815
816    @asaplog_post_dec
817    def set_unit(self, unit='channel'):
818        """\
819        Set the unit for all following operations on this scantable
820
821        Parameters:
822
823            unit:    optional unit, default is 'channel'. Use one of '*Hz',
824                     'km/s', 'channel' or equivalent ''
825
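        A usage sketch::

            scan.set_unit('km/s')    # work in velocity
            scan.set_unit('GHz')     # or in frequency
            scan.set_unit()          # back to channels
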
826        """
827        varlist = vars()
828        if unit in ['', 'pixel', 'channel']:
829            unit = ''
830        inf = list(self._getcoordinfo())
831        inf[0] = unit
832        self._setcoordinfo(inf)
833        self._add_history("set_unit", varlist)
834
835    @asaplog_post_dec
836    def set_instrument(self, instr):
837        """\
838        Set the instrument for subsequent processing.
839
840        Parameters:
841
842            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
843                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
844
845        """
846        self._setInstrument(instr)
847        self._add_history("set_instrument", vars())
848
849    @asaplog_post_dec
850    def set_feedtype(self, feedtype):
851        """\
852        Overwrite the feed type, which might not be set correctly.
853
854        Parameters:
855
856            feedtype:     'linear' or 'circular'
857
858        """
859        self._setfeedtype(feedtype)
860        self._add_history("set_feedtype", vars())
861
862    @asaplog_post_dec
863    def set_doppler(self, doppler='RADIO'):
864        """\
865        Set the doppler for all following operations on this scantable.
866
867        Parameters:
868
869            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
870
871        """
872        varlist = vars()
873        inf = list(self._getcoordinfo())
874        inf[2] = doppler
875        self._setcoordinfo(inf)
876        self._add_history("set_doppler", vars())
877
878    @asaplog_post_dec
879    def set_freqframe(self, frame=None):
880        """\
881        Set the frame type of the Spectral Axis.
882
883        Parameters:
884
885            frame:   an optional frame type, default 'LSRK'. Valid frames are:
886                     'TOPO', 'LSRD', 'LSRK', 'BARY',
887                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
888
889        Example::
890
891            scan.set_freqframe('BARY')
892
893        """
894        frame = frame or rcParams['scantable.freqframe']
895        varlist = vars()
896        # "REST" is not implemented in casacore
897        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
898        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
899        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
900                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
901
902        if frame in valid:
903            inf = list(self._getcoordinfo())
904            inf[1] = frame
905            self._setcoordinfo(inf)
906            self._add_history("set_freqframe", varlist)
907        else:
908            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
909            raise TypeError(msg)
910
911    @asaplog_post_dec
912    def set_dirframe(self, frame=""):
913        """\
914        Set the frame type of the Direction on the sky.
915
916        Parameters:
917
918            frame:   an optional frame type, default ''. Valid frames are:
919                     'J2000', 'B1950', 'GALACTIC'
920
921        Example:
922
923            scan.set_dirframe('GALACTIC')
924
925        """
926        varlist = vars()
927        Scantable.set_dirframe(self, frame)
928        self._add_history("set_dirframe", varlist)
929
930    def get_unit(self):
931        """\
932        Get the default unit set in this scantable
933
934        Returns:
935
936            A unit string
937
938        """
939        inf = self._getcoordinfo()
940        unit = inf[0]
941        if unit == '': unit = 'channel'
942        return unit
943
944    @asaplog_post_dec
945    def get_abcissa(self, rowno=0):
946        """\
947        Get the abcissa in the current coordinate setup for the currently
948        selected Beam/IF/Pol
949
950        Parameters:
951
952            rowno:    an optional row number in the scantable. Default is the
953                      first row, i.e. rowno=0
954
955        Returns:
956
957            The abcissa values and the format string (as a dictionary)
958
959        """
960        abc = self._getabcissa(rowno)
961        lbl = self._getabcissalabel(rowno)
962        return abc, lbl
963
964    @asaplog_post_dec
965    def flag(self, mask=None, unflag=False):
966        """\
967        Flag the selected data using an optional channel mask.
968
969        Parameters:
970
971            mask:   an optional channel mask, created with create_mask. Default
972                    (no mask) is all channels.
973
974            unflag:    if True, unflag the data
975
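        A usage sketch (channel ranges are illustrative; the channels covered
        by the mask are the ones flagged)::

            msk = scan.create_mask([0, 99], [900, 1023])
            scan.flag(mask=msk)                 # flag those channels
            scan.flag(mask=msk, unflag=True)    # undo it again
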
976        """
977        varlist = vars()
978        mask = mask or []
979        self._flag(mask, unflag)
980        self._add_history("flag", varlist)
981
982    @asaplog_post_dec
983    def flag_row(self, rows=[], unflag=False):
984        """\
985        Flag the selected data in row-based manner.
986
987        Parameters:
988
989            rows:   list of row numbers to be flagged. Default is no row
990                    (must be explicitly specified to execute row-based flagging).
991
992            unflag: if True, unflag the data.
993
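        A usage sketch (row numbers are illustrative)::

            scan.flag_row([0, 5, 6])    # flag rows 0, 5 and 6
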
994        """
995        varlist = vars()
996        self._flag_row(rows, unflag)
997        self._add_history("flag_row", varlist)
998
999    @asaplog_post_dec
1000    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1001        """\
1002        Flag the selected data outside a specified range (channel-based)
1003
1004        Parameters:
1005
1006            uthres:      upper threshold.
1007
1008            dthres:      lower threshold
1009
1010            clipoutside: True for flagging data outside the range [dthres:uthres].
1011                         False for flagging data inside the range.
1012
1013            unflag:      if True, unflag the data.
1014
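        A usage sketch (thresholds are illustrative)::

            # flag everything outside the range [-3.0, 3.0]
            scan.clip(uthres=3.0, dthres=-3.0)
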
1015        """
1016        varlist = vars()
1017        self._clip(uthres, dthres, clipoutside, unflag)
1018        self._add_history("clip", varlist)
1019
1020    @asaplog_post_dec
1021    def lag_flag(self, start, end, unit="MHz", insitu=None):
1022        """\
1023        Flag the data in 'lag' space by providing a frequency to remove.
1024        Flagged data in the scantable gets interpolated over the region.
1025        No taper is applied.
1026
1027        Parameters:
1028
1029            start:    the start frequency (really a period within the
1030                      bandwidth)  or period to remove
1031
1032            end:      the end frequency or period to remove
1033
1034            unit:     the frequency unit (default "MHz") or "" for
1035                      explicit lag channels
1036
1037        *Notes*:
1038
1039            It is recommended to flag edges of the band or strong
1040            signals beforehand.
1041
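        A usage sketch (the frequency range is illustrative)::

            # interpolate over lag components between 5.4 and 5.6 MHz
            cleaned = scan.lag_flag(5.4, 5.6, unit='MHz', insitu=False)
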
1042        """
1043        if insitu is None: insitu = rcParams['insitu']
1044        self._math._setinsitu(insitu)
1045        varlist = vars()
1046        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1047        if not (unit == "" or base.has_key(unit)):
1048            raise ValueError("%s is not a valid unit." % unit)
1049        if unit == "":
1050            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1051        else:
1052            s = scantable(self._math._lag_flag(self, start*base[unit],
1053                                               end*base[unit], "frequency"))
1054        s._add_history("lag_flag", varlist)
1055        if insitu:
1056            self._assign(s)
1057        else:
1058            return s
1059
1060    @asaplog_post_dec
1061    def create_mask(self, *args, **kwargs):
1062        """\
1063        Compute and return a mask based on [min, max] windows.
1064        The specified windows are to be INCLUDED, when the mask is
1065        applied.
1066
1067        Parameters:
1068
1069            [min, max], [min2, max2], ...
1070                Pairs of start/end points (inclusive) specifying the regions
1071                to be masked
1072
1073            invert:     optional argument. If specified as True,
1074                        return an inverted mask, i.e. the regions
1075                        specified are EXCLUDED
1076
1077            row:        create the mask using the specified row for
1078                        unit conversions, default is row=0
1079                        only necessary if frequency varies over rows.
1080
1081        Examples::
1082
1083            scan.set_unit('channel')
1084            # a)
1085            msk = scan.create_mask([400, 500], [800, 900])
1086            # masks everything outside 400 and 500
1087            # and 800 and 900 in the unit 'channel'
1088
1089            # b)
1090            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1091            # masks the regions between 400 and 500
1092            # and 800 and 900 in the unit 'channel'
1093
1094            # c)
1095            #mask only channel 400
1096            msk =  scan.create_mask([400])
1097
1098        """
1099        row = kwargs.get("row", 0)
1100        data = self._getabcissa(row)
1101        u = self._getcoordinfo()[0]
1102        if u == "":
1103            u = "channel"
1104        msg = "The current mask window unit is %s" % u
1105        i = self._check_ifs()
1106        if not i:
1107            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1108        asaplog.push(msg)
1109        n = self.nchan()
1110        msk = _n_bools(n, False)
1111        # test if args is a 'list' or a 'normal' *args - UGLY!!!
1112
1113        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1114             and args or args[0]
1115        for window in ws:
1116            if len(window) == 1:
1117                window = [window[0], window[0]]
1118            if len(window) == 0 or  len(window) > 2:
1119                raise ValueError("A window needs to be defined as [start(, end)]")
1120            if window[0] > window[1]:
1121                tmp = window[0]
1122                window[0] = window[1]
1123                window[1] = tmp
1124            for i in range(n):
1125                if data[i] >= window[0] and data[i] <= window[1]:
1126                    msk[i] = True
1127        if kwargs.has_key('invert'):
1128            if kwargs.get('invert'):
1129                msk = mask_not(msk)
1130        return msk
1131
1132    def get_masklist(self, mask=None, row=0):
1133        """\
1134        Compute and return a list of mask windows, [min, max].
1135
1136        Parameters:
1137
1138            mask:       channel mask, created with create_mask.
1139
1140            row:        calculate the masklist using the specified row
1141                        for unit conversions, default is row=0
1142                        only necessary if frequency varies over rows.
1143
1144        Returns:
1145
1146            [min, max], [min2, max2], ...
1147                Pairs of start/end points (inclusive) specifying
1148                the masked regions
1149
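        A usage sketch (the channel range is illustrative)::

            msk = scan.create_mask([400, 500])
            print scan.get_masklist(msk)   # e.g. [[400.0, 500.0]] in 'channel' unit
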
1150        """
1151        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1152            raise TypeError("The mask should be list or tuple.")
1153        if len(mask) < 2:
1154            raise TypeError("The mask should have more than one element")
1155        if self.nchan() != len(mask):
1156            msg = "Number of channels in scantable != number of mask elements"
1157            raise TypeError(msg)
1158        data = self._getabcissa(row)
1159        u = self._getcoordinfo()[0]
1160        if u == "":
1161            u = "channel"
1162        msg = "The current mask window unit is %s" % u
1163        i = self._check_ifs()
1164        if not i:
1165            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1166        asaplog.push(msg)
1167        masklist=[]
1168        ist, ien = None, None
1169        ist, ien=self.get_mask_indices(mask)
1170        if ist is not None and ien is not None:
1171            for i in xrange(len(ist)):
1172                range=[data[ist[i]],data[ien[i]]]
1173                range.sort()
1174                masklist.append([range[0],range[1]])
1175        return masklist
1176
1177    def get_mask_indices(self, mask=None):
1178        """\
1179        Compute and return lists of mask start indices and mask end indices.
1180
1181        Parameters:
1182
1183            mask:       channel mask, created with create_mask.
1184
1185        Returns:
1186
1187            List of mask start indices and that of mask end indices,
1188            i.e., [istart1,istart2,....], [iend1,iend2,....].
1189
1190        """
1191        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1192            raise TypeError("The mask should be list or tuple.")
1193        if len(mask) < 2:
1194            raise TypeError("The mask should have more than one element")
1195        istart=[]
1196        iend=[]
1197        if mask[0]: istart.append(0)
1198        for i in range(len(mask)-1):
1199            if not mask[i] and mask[i+1]:
1200                istart.append(i+1)
1201            elif mask[i] and not mask[i+1]:
1202                iend.append(i)
1203        if mask[len(mask)-1]: iend.append(len(mask)-1)
1204        if len(istart) != len(iend):
1205            raise RuntimeError("Numbers of mask start != mask end.")
1206        for i in range(len(istart)):
1207            if istart[i] > iend[i]:
1208                raise RuntimeError("Mask start index > mask end index")
1210        return istart,iend
1211
1212#    def get_restfreqs(self):
1213#        """
1214#        Get the restfrequency(s) stored in this scantable.
1215#        The return value(s) are always of unit 'Hz'
1216#        Parameters:
1217#            none
1218#        Returns:
1219#            a list of doubles
1220#        """
1221#        return list(self._getrestfreqs())
1222
1223    def get_restfreqs(self, ids=None):
1224        """\
1225        Get the restfrequency(s) stored in this scantable.
1226        The return value(s) are always of unit 'Hz'
1227
1228        Parameters:
1229
1230            ids: (optional) a list of MOLECULE_ID for which the restfrequency(s)
1231                 are to be retrieved
1232
1233        Returns:
1234
1235            dictionary containing ids and a list of doubles for each id
1236
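        A usage sketch::

            rfall = scan.get_restfreqs()    # {molecule_id: [freq_in_Hz, ...]}
            rf0 = scan.get_restfreqs(0)     # frequencies for MOLECULE_ID 0
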
1237        """
1238        if ids is None:
1239            rfreqs={}
1240            idlist = self.getmolnos()
1241            for i in idlist:
1242                rfreqs[i]=list(self._getrestfreqs(i))
1243            return rfreqs
1244        else:
1245            if type(ids)==list or type(ids)==tuple:
1246                rfreqs={}
1247                for i in ids:
1248                    rfreqs[i]=list(self._getrestfreqs(i))
1249                return rfreqs
1250            else:
1251                return list(self._getrestfreqs(ids))
1252            #return list(self._getrestfreqs(ids))
1253
1254    def set_restfreqs(self, freqs=None, unit='Hz'):
1255        """\
1256        Set or replace the rest frequency used by this scantable.
1257        If the 'freqs' argument holds a scalar,
1258        then that rest frequency will be applied to all the selected
1259        data.  If the 'freqs' argument holds
1260        a vector, then it MUST be of equal or smaller length than
1261        the number of IFs (and the available restfrequencies will be
1262        replaced by this vector).  In this case, *all* data have
1263        the restfrequency set per IF according
1264        to the corresponding value you give in the 'freqs' vector.
1265        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1266        IF 1 gets restfreq 2e9.
1267
1268        You can also specify the frequencies via a linecatalog.
1269
1270        Parameters:
1271
1272            freqs:   list of rest frequency values or string identifiers
1273
1274            unit:    unit for rest frequency (default 'Hz')
1275
1276
1277        Example::
1278
1279            # set the given restfrequency for all currently selected IFs
1280            scan.set_restfreqs(freqs=1.4e9)
1281            # set restfrequencies for the n IFs  (n > 1) in the order of the
1282            # list, i.e
1283            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1284            # len(list_of_restfreqs) == nIF
1285            # for nIF == 1 the following will set multiple restfrequency for
1286            # that IF
1287            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1288            # set multiple restfrequencies per IF, as a list of lists where
1289            # the outer list has nIF elements and the inner lists are of arbitrary length
1290            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1291
1292       *Note*:
1293
1294            To do more sophisticated rest frequency setting, e.g. on a
1295            source and IF basis, use scantable.set_selection() before using
1296            this function::
1297
1298                # provided your scantable is called scan
1299                selection = selector()
1300                selection.set_name("ORION*")
1301                selection.set_ifs([1])
1302                scan.set_selection(selection)
1303                scan.set_restfreqs(freqs=86.6e9)
1304
1305        """
1306        varlist = vars()
1307        from asap import linecatalog
1308        # simple  value
1309        if isinstance(freqs, int) or isinstance(freqs, float):
1310            self._setrestfreqs([freqs], [""], unit)
1311        # list of values
1312        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1313            # list values are scalars
1314            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1315                if len(freqs) == 1:
1316                    self._setrestfreqs(freqs, [""], unit)
1317                else:
1318                    # allow the 'old' mode of setting multiple IFs
1319                    sel = selector()
1320                    savesel = self._getselection()
1321                    iflist = self.getifnos()
1322                    if len(freqs)>len(iflist):
1323                        raise ValueError("number of elements in the list "
1324                                         "exceeds the current IF selections")
1326                    for i, fval in enumerate(freqs):
1327                        sel.set_ifs(iflist[i])
1328                        self._setselection(sel)
1329                        self._setrestfreqs([fval], [""], unit)
1330                    self._setselection(savesel)
1331
1332            # list values are dict, {'value'=, 'name'=)
1333            elif isinstance(freqs[-1], dict):
1334                values = []
1335                names = []
1336                for d in freqs:
1337                    values.append(d["value"])
1338                    names.append(d["name"])
1339                self._setrestfreqs(values, names, unit)
1340            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1341                sel = selector()
1342                savesel = self._getselection()
1343                iflist = self.getifnos()
1344                if len(freqs)>len(iflist):
1345                    raise ValueError("number of elements in list of list exceeds"
1346                                     " the current IF selections")
1347                for i, fval in enumerate(freqs):
1348                    sel.set_ifs(iflist[i])
1349                    self._setselection(sel)
1350                    self._setrestfreqs(fval, [""], unit)
1351                self._setselection(savesel)
1352        # freqs are to be taken from a linecatalog
1353        elif isinstance(freqs, linecatalog):
1354            sel = selector()
1355            savesel = self._getselection()
            iflist = self.getifnos()
1356            for i in xrange(freqs.nrow()):
1357                sel.set_ifs(iflist[i])
1358                self._setselection(sel)
1359                self._setrestfreqs([freqs.get_frequency(i)],
1360                                   [freqs.get_name(i)], "MHz")
1361                # ensure that we are not iterating past nIF
1362                if i == self.nif()-1: break
1363            self._setselection(savesel)
1364        else:
1365            return
1366        self._add_history("set_restfreqs", varlist)
1367
1368    def shift_refpix(self, delta):
1369        """\
1370        Shift the reference pixel of the Spectral Coordinate by an
1371        integer amount.
1372
1373        Parameters:
1374
1375            delta:   the amount to shift by
1376
1377        *Note*:
1378
1379            Be careful using this with broadband data.
1380
1381        """
1382        Scantable.shift_refpix(self, delta)
1383
1384    @asaplog_post_dec
1385    def history(self, filename=None):
1386        """\
1387        Print the history. Optionally to a file.
1388
1389        Parameters:
1390
1391            filename:    The name  of the file to save the history to.
1392
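        A usage sketch (the file name is only an illustration)::

            scan.history()                       # page to screen
            scan.history('scan_history.txt')     # also write to a text file
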
1393        """
1394        hist = list(self._gethistory())
1395        out = "-"*80
1396        for h in hist:
1397            if h.startswith("---"):
1398                out = "\n".join([out, h])
1399            else:
1400                items = h.split("##")
1401                date = items[0]
1402                func = items[1]
1403                items = items[2:]
1404                out += "\n"+date+"\n"
1405                out += "Function: %s\n  Parameters:" % (func)
1406                for i in items:
1407                    s = i.split("=")
1408                    out += "\n   %s = %s" % (s[0], s[1])
1409                out = "\n".join([out, "-"*80])
1410        if filename is not None:
1411            if filename == "":
1412                filename = 'scantable_history.txt'
1413            import os
1414            filename = os.path.expandvars(os.path.expanduser(filename))
1415            if not os.path.isdir(filename):
1416                data = open(filename, 'w')
1417                data.write(out)
1418                data.close()
1419            else:
1420                msg = "Illegal file name '%s'." % (filename)
1421                raise IOError(msg)
1422        return page(out)
1423    #
1424    # Maths business
1425    #
1426    @asaplog_post_dec
1427    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1428        """\
1429        Return the (time) weighted average of a scan.
1430
1431        *Note*:
1432
1433            in channels only - align if necessary
1434
1435        Parameters:
1436
1437            mask:     an optional mask (only used for 'var' and 'tsys'
1438                      weighting)
1439
1440            scanav:   True averages each scan separately
1441                      False (default) averages all scans together,
1442
1443            weight:   Weighting scheme.
1444                      'none'     (mean no weight)
1445                      'var'      (1/var(spec) weighted)
1446                      'tsys'     (1/Tsys**2 weighted)
1447                      'tint'     (integration time weighted)
1448                      'tintsys'  (Tint/Tsys**2)
1449                      'median'   ( median averaging)
1450                      The default is 'tint'
1451
1452            align:    align the spectra in velocity before averaging. It takes
1453                      the time of the first spectrum as reference time.
1454
1455        Example::
1456
1457            # time average the scantable without using a mask
1458            newscan = scan.average_time()
1459
1460        """
1461        varlist = vars()
1462        weight = weight or 'TINT'
1463        mask = mask or ()
1464        scanav = (scanav and 'SCAN') or 'NONE'
1465        scan = (self, )
1466
1467        if align:
1468            scan = (self.freq_align(insitu=False), )
1469        s = None
1470        if weight.upper() == 'MEDIAN':
1471            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1472                                                     scanav))
1473        else:
1474            s = scantable(self._math._average(scan, mask, weight.upper(),
1475                          scanav))
1476        s._add_history("average_time", varlist)
1477        return s
1478
1479    @asaplog_post_dec
1480    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1481        """\
1482        Return a scan where all spectra are converted to either
1483        Jansky or Kelvin depending upon the flux units of the scan table.
1484        By default the function tries to look the values up internally.
1485        If it can't find them (or if you want to over-ride), you must
1486        specify EITHER jyperk OR eta (and D which it will try to look up
1487        also if you don't set it). jyperk takes precedence if you set both.
1488
1489        Parameters:
1490
1491            jyperk:      the Jy / K conversion factor
1492
1493            eta:         the aperture efficiency
1494
1495            d:           the geometric diameter (metres)
1496
1497            insitu:      if False a new scantable is returned.
1498                         Otherwise, the scaling is done in-situ
1499                         The default is taken from .asaprc (False)
1500
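        A usage sketch (the Jy/K factor is illustrative only)::

            scan_jy = scan.convert_flux(jyperk=12.7, insitu=False)
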
1501        """
1502        if insitu is None: insitu = rcParams['insitu']
1503        self._math._setinsitu(insitu)
1504        varlist = vars()
1505        jyperk = jyperk or -1.0
1506        d = d or -1.0
1507        eta = eta or -1.0
1508        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1509        s._add_history("convert_flux", varlist)
1510        if insitu: self._assign(s)
1511        else: return s
1512
1513    @asaplog_post_dec
1514    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1515        """\
1516        Return a scan after applying a gain-elevation correction.
1517        The correction can be made via either a polynomial or a
1518        table-based interpolation (and extrapolation if necessary).
1519        You specify polynomial coefficients, an ascii table or neither.
1520        If you specify neither, then a polynomial correction will be made
1521        with built in coefficients known for certain telescopes (an error
1522        will occur if the instrument is not known).
1523        The data and Tsys are *divided* by the scaling factors.
1524
1525        Parameters:
1526
1527            poly:        Polynomial coefficients (default None) to compute a
1528                         gain-elevation correction as a function of
1529                         elevation (in degrees).
1530
1531            filename:    The name of an ascii file holding correction factors.
1532                         The first row of the ascii file must give the column
1533                         names and these MUST include columns
1534                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1535                         by this) somewhere.
1536                         The second row must give the data type of the
1537                         column. Use 'R' for Real and 'I' for Integer.
1538                         An example file would be
1539                         (actual factors are arbitrary):
1540
1541                         TIME ELEVATION FACTOR
1542                         R R R
1543                         0.1 0 0.8
1544                         0.2 20 0.85
1545                         0.3 40 0.9
1546                         0.4 60 0.85
1547                         0.5 80 0.8
1548                         0.6 90 0.75
1549
1550            method:      Interpolation method when correcting from a table.
1551                         Values are  "nearest", "linear" (default), "cubic"
1552                         and "spline"
1553
1554            insitu:      if False a new scantable is returned.
1555                         Otherwise, the scaling is done in-situ
1556                         The default is taken from .asaprc (False)
1557
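        Example (a sketch; 'gainel.dat' is a hypothetical file laid out as
        described above)::

            # table-based correction with linear interpolation
            corrscan = scan.gain_el(filename='gainel.dat', method='linear',
                                    insitu=False)
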
1558        """
1559
1560        if insitu is None: insitu = rcParams['insitu']
1561        self._math._setinsitu(insitu)
1562        varlist = vars()
1563        poly = poly or ()
1564        from os.path import expandvars
1565        filename = expandvars(filename)
1566        s = scantable(self._math._gainel(self, poly, filename, method))
1567        s._add_history("gain_el", varlist)
1568        if insitu:
1569            self._assign(s)
1570        else:
1571            return s
1572
1573    @asaplog_post_dec
1574    def freq_align(self, reftime=None, method='cubic', insitu=None):
1575        """\
1576        Return a scan where all rows have been aligned in frequency/velocity.
1577        The alignment frequency frame (e.g. LSRK) is that set by function
1578        set_freqframe.
1579
1580        Parameters:
1581
1582            reftime:     reference time to align at. By default, the time of
1583                         the first row of data is used.
1584
1585            method:      Interpolation method for regridding the spectra.
1586                         Choose from "nearest", "linear", "cubic" (default)
1587                         and "spline"
1588
1589            insitu:      if False a new scantable is returned.
1590                         Otherwise, the scaling is done in-situ
1591                         The default is taken from .asaprc (False)
1592
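        Example (illustrative)::

            # align all rows to the time of the first row, returning a copy
            aligned = scan.freq_align(insitu=False)
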
1593        """
1594        if insitu is None: insitu = rcParams["insitu"]
1595        self._math._setinsitu(insitu)
1596        varlist = vars()
1597        reftime = reftime or ""
1598        s = scantable(self._math._freq_align(self, reftime, method))
1599        s._add_history("freq_align", varlist)
1600        if insitu: self._assign(s)
1601        else: return s
1602
1603    @asaplog_post_dec
1604    def opacity(self, tau=None, insitu=None):
1605        """\
1606        Apply an opacity correction. The data
1607        and Tsys are multiplied by the correction factor.
1608
1609        Parameters:
1610
1611            tau:         (list of) opacity from which the correction factor is
1612                         exp(tau*ZD)
1613                         where ZD is the zenith-distance.
1614                         If a list is provided, it has to be of length nIF,
1615                         nIF*nPol or 1 and in order of IF/POL, e.g.
1616                         [opif0pol0, opif0pol1, opif1pol0 ...]
1617                         If tau is `None`, the opacities are determined from a
1618                         model.
1619
1620            insitu:      if False a new scantable is returned.
1621                         Otherwise, the scaling is done in-situ
1622                         The default is taken from .asaprc (False)
1623
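        Example (0.09 is an arbitrary placeholder opacity)::

            # apply a single opacity value to all IFs/polarisations
            corrected = scan.opacity(tau=0.09, insitu=False)
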
1624        """
1625        if insitu is None: insitu = rcParams['insitu']
1626        self._math._setinsitu(insitu)
1627        varlist = vars()
1628        if not hasattr(tau, "__len__"):
1629            tau = [tau]
1630        s = scantable(self._math._opacity(self, tau))
1631        s._add_history("opacity", varlist)
1632        if insitu: self._assign(s)
1633        else: return s
1634
1635    @asaplog_post_dec
1636    def bin(self, width=5, insitu=None):
1637        """\
1638        Return a scan where all spectra have been binned up.
1639
1640        Parameters:
1641
1642            width:       The bin width (default=5) in pixels
1643
1644            insitu:      if False a new scantable is returned.
1645                         Otherwise, the scaling is done in-situ
1646                         The default is taken from .asaprc (False)
1647
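        Example (illustrative)::

            # bin up the spectra by 4 channels, returning a new scantable
            binned = scan.bin(width=4, insitu=False)
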
1648        """
1649        if insitu is None: insitu = rcParams['insitu']
1650        self._math._setinsitu(insitu)
1651        varlist = vars()
1652        s = scantable(self._math._bin(self, width))
1653        s._add_history("bin", varlist)
1654        if insitu:
1655            self._assign(s)
1656        else:
1657            return s
1658
1659    @asaplog_post_dec
1660    def resample(self, width=5, method='cubic', insitu=None):
1661        """\
1662        Return a scan where all spectra have been resampled to a new channel width.
1663
1664        Parameters:
1665
1666            width:       The bin width (default=5) in pixels
1667
1668            method:      Interpolation method used for the resampling.
1669                         Values are "nearest", "linear", "cubic" (default)
1670                         and "spline"
1671
1672            insitu:      if False a new scantable is returned.
1673                         Otherwise, the scaling is done in-situ
1674                         The default is taken from .asaprc (False)
1675
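        Example (illustrative)::

            # resample onto 2-pixel wide channels using spline interpolation
            rs = scan.resample(width=2, method='spline', insitu=False)
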
1676        """
1677        if insitu is None: insitu = rcParams['insitu']
1678        self._math._setinsitu(insitu)
1679        varlist = vars()
1680        s = scantable(self._math._resample(self, method, width))
1681        s._add_history("resample", varlist)
1682        if insitu: self._assign(s)
1683        else: return s
1684
1685    @asaplog_post_dec
1686    def average_pol(self, mask=None, weight='none'):
1687        """\
1688        Average the Polarisations together.
1689
1690        Parameters:
1691
1692            mask:        An optional mask defining the region where the
1693                         averaging will be applied. The output will have all
1694                         specified points masked.
1695
1696            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1697                         weighted), or 'tsys' (1/Tsys**2 weighted)
1698
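        Example (illustrative)::

            # average the polarisations together with 1/Tsys**2 weighting
            apol = scan.average_pol(weight='tsys')
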
1699        """
1700        varlist = vars()
1701        mask = mask or ()
1702        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1703        s._add_history("average_pol", varlist)
1704        return s
1705
1706    @asaplog_post_dec
1707    def average_beam(self, mask=None, weight='none'):
1708        """\
1709        Average the Beams together.
1710
1711        Parameters:
1712            mask:        An optional mask defining the region where the
1713                         averaging will be applied. The output will have all
1714                         specified points masked.
1715
1716            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1717                         weighted), or 'tsys' (1/Tsys**2 weighted)
1718
1719        """
1720        varlist = vars()
1721        mask = mask or ()
1722        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1723        s._add_history("average_beam", varlist)
1724        return s
1725
1726    def parallactify(self, pflag):
1727        """\
1728        Set a flag to indicate whether this data should be treated as having
1729        been 'parallactified' (total phase == 0.0)
1730
1731        Parameters:
1732
1733            pflag:  Bool indicating whether to turn this on (True) or
1734                    off (False)
1735
1736        """
1737        varlist = vars()
1738        self._parallactify(pflag)
1739        self._add_history("parallactify", varlist)
1740
1741    @asaplog_post_dec
1742    def convert_pol(self, poltype=None):
1743        """\
1744        Convert the data to a different polarisation type.
1745        Note that you will need cross-polarisation terms for most conversions.
1746
1747        Parameters:
1748
1749            poltype:    The new polarisation type. Valid types are:
1750                        "linear", "circular", "stokes" and "linpol"
1751
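        Example (illustrative; assumes the cross-polarisation products are
        present in the data)::

            # convert linear polarisation products to Stokes parameters
            stokescan = scan.convert_pol('stokes')
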
1752        """
1753        varlist = vars()
1754        s = scantable(self._math._convertpol(self, poltype))
1755        s._add_history("convert_pol", varlist)
1756        return s
1757
1758    @asaplog_post_dec
1759    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1760        """\
1761        Smooth the spectrum by the specified kernel (conserving flux).
1762
1763        Parameters:
1764
1765            kernel:     The type of smoothing kernel. Select from
1766                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1767                        or 'poly'
1768
1769            width:      The width of the kernel in pixels. For 'hanning' this
1770                        is ignored; otherwise it defaults to 5 pixels.
1771                        For 'gaussian' it is the Full Width Half
1772                        Maximum. For 'boxcar' it is the full width.
1773                        For 'rmedian' and 'poly' it is the half width.
1774
1775            order:      Optional parameter for the 'poly' kernel (default is 2),
1776                        to specify the order of the polynomial. Ignored by all
1777                        other kernels.
1778
1779            plot:       plot the original and the smoothed spectra.
1780                        In this mode each individual fit has to be approved
1781                        by typing 'y' or 'n'
1782
1783            insitu:     if False a new scantable is returned.
1784                        Otherwise, the scaling is done in-situ
1785                        The default is taken from .asaprc (False)
1786
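        Example (illustrative values)::

            # boxcar-smooth over 3 pixels, returning a new scantable
            sm = scan.smooth(kernel='boxcar', width=3, insitu=False)
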
1787        """
1788        if insitu is None: insitu = rcParams['insitu']
1789        self._math._setinsitu(insitu)
1790        varlist = vars()
1791
1792        if plot: orgscan = self.copy()
1793
1794        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1795        s._add_history("smooth", varlist)
1796
1797        if plot:
1798            if rcParams['plotter.gui']:
1799                from asap.asaplotgui import asaplotgui as asaplot
1800            else:
1801                from asap.asaplot import asaplot
1802            self._p=asaplot()
1803            self._p.set_panels()
1804            ylab=s._get_ordinate_label()
1805            #self._p.palette(0,["#777777","red"])
1806            for r in xrange(s.nrow()):
1807                xsm=s._getabcissa(r)
1808                ysm=s._getspectrum(r)
1809                xorg=orgscan._getabcissa(r)
1810                yorg=orgscan._getspectrum(r)
1811                self._p.clear()
1812                self._p.hold()
1813                self._p.set_axes('ylabel',ylab)
1814                self._p.set_axes('xlabel',s._getabcissalabel(r))
1815                self._p.set_axes('title',s._getsourcename(r))
1816                self._p.set_line(label='Original',color="#777777")
1817                self._p.plot(xorg,yorg)
1818                self._p.set_line(label='Smoothed',color="red")
1819                self._p.plot(xsm,ysm)
1820                ### Ugly part for legend
1821                for i in [0,1]:
1822                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1823                self._p.release()
1824                ### Ugly part for legend
1825                self._p.subplots[0]['lines']=[]
1826                res = raw_input("Accept smoothing ([y]/n): ")
1827                if res.upper() == 'N':
1828                    s._setspectrum(yorg, r)
1829            self._p.unmap()
1830            self._p = None
1831            del orgscan
1832
1833        if insitu: self._assign(s)
1834        else: return s
1835
1836    @asaplog_post_dec
1837    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1838                      insitu=None):
1839        """\
1840        Return a scan which has been baselined (all rows) by a polynomial.
1841
1842        Parameters:
1843
1844            mask:       an optional mask
1845
1846            order:      the order of the polynomial (default is 0)
1847
1848            plot:       plot the fit and the residual. In this mode each
1849                        individual fit has to be approved by typing 'y'
1850                        or 'n'
1851
1852            uselin:     use linear polynomial fit
1853
1854            insitu:     if False a new scantable is returned.
1855                        Otherwise, the scaling is done in-situ
1856                        The default is taken from .asaprc (False)
1857
1858        Example::
1859
1860            # return a scan baselined by a third order polynomial,
1861            # not using a mask
1862            bscan = scan.poly_baseline(order=3)
1863
1864        """
1865        if insitu is None: insitu = rcParams['insitu']
1866        if not insitu:
1867            workscan = self.copy()
1868        else:
1869            workscan = self
1870        varlist = vars()
1871        if mask is None:
1872            mask = [True for i in xrange(self.nchan(-1))]
1873
1874        from asap.asapfitter import fitter
1875        try:
1876            f = fitter()
1877            if uselin:
1878                f.set_function(lpoly=order)
1879            else:
1880                f.set_function(poly=order)
1881
1882            rows = range(workscan.nrow())
1883            if len(rows) > 0:
1884                self.blpars = []
1885
1886            for r in rows:
1887                # take into account flagtra info (CAS-1434)
1888                flagtra = workscan._getmask(r)
1889                actualmask = mask[:]
1890                if len(actualmask) == 0:
1891                    actualmask = list(flagtra[:])
1892                else:
1893                    if len(actualmask) != len(flagtra):
1894                        raise RuntimeError, "Mask and flagtra have different length"
1895                    else:
1896                        for i in range(0, len(actualmask)):
1897                            actualmask[i] = actualmask[i] and flagtra[i]
1898                f.set_scan(workscan, actualmask)
1899                f.x = workscan._getabcissa(r)
1900                f.y = workscan._getspectrum(r)
1901                f.data = None
1902                f.fit()
1903                if plot:
1904                    f.plot(residual=True)
1905                    x = raw_input("Accept fit ( [y]/n ): ")
1906                    if x.upper() == 'N':
1907                        self.blpars.append(None)
1908                        continue
1909                workscan._setspectrum(f.fitter.getresidual(), r)
1910                self.blpars.append(f.get_parameters())
1911
1912            if plot:
1913                f._p.unmap()
1914                f._p = None
1915            workscan._add_history("poly_baseline", varlist)
1916            if insitu:
1917                self._assign(workscan)
1918            else:
1919                return workscan
1920        except RuntimeError:
1921            msg = "The fit failed, possibly because it didn't converge."
1922            raise RuntimeError(msg)
1923
1924
1925    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1926                           threshold=3, chan_avg_limit=1, plot=False,
1927                           insitu=None):
1928        """\
1929        Return a scan which has been baselined (all rows) by a polynomial.
1930        Spectral lines are detected first using linefinder and masked out
1931        to avoid them affecting the baseline solution.
1932
1933        Parameters:
1934
1935            mask:       an optional mask retrieved from the scantable
1936
1937            edge:       an optional number of channels to drop at the edge of
1938                        the spectrum. If only one value is
1939                        specified, the same number will be dropped from
1940                        both sides of the spectrum. Default is to keep
1941                        all channels. Nested tuples represent individual
1942                        edge selections for different IFs (the number of spectral
1943                        channels may differ between IFs)
1944
1945            order:      the order of the polynomial (default is 0)
1946
1947            threshold:  the threshold used by line finder. It is better to
1948                        keep it large as only strong lines affect the
1949                        baseline solution.
1950
1951            chan_avg_limit:
1952                        a maximum number of consecutive spectral channels to
1953                        average during the search for weak and broad lines.
1954                        The default is no averaging (and no search for weak
1955                        lines). If such lines can affect the fitted baseline
1956                        (e.g. a high order polynomial is fitted), increase this
1957                        parameter (usually values up to 8 are reasonable). Most
1958                        users of this method should find the default value
1959                        sufficient.
1960
1961            plot:       plot the fit and the residual. In this mode each
1962                        individual fit has to be approved by typing 'y'
1963                        or 'n'
1964
1965            insitu:     if False a new scantable is returned.
1966                        Otherwise, the scaling is done in-situ
1967                        The default is taken from .asaprc (False)
1968
1969
1970        Example::
1971
1972            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
1973
1974        """
1975        if insitu is None: insitu = rcParams['insitu']
1976        varlist = vars()
1977        from asap.asapfitter import fitter
1978        from asap.asaplinefind import linefinder
1979        from asap import _is_sequence_or_number as _is_valid
1980
1981        # check whether edge is set up for each IF individually
1982        individualedge = False
1983        if len(edge) > 1:
1984            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1985                individualedge = True
1986
1987        if not _is_valid(edge, int) and not individualedge:
1988            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1989                              "pair of integers specified as a tuple. Nested tuples " \
1990                              "are allowed to make individual selection for different IFs."
1991
1992        curedge = (0, 0)
1993        if individualedge:
1994            for edgepar in edge:
1995                if not _is_valid(edgepar, int):
1996                    raise ValueError, "Each element of the 'edge' tuple has " \
1997                                      "to be a pair of integers or an integer."
1998        else:
1999            curedge = edge
2000
2001        # setup fitter
2002        f = fitter()
2003        f.set_function(poly=order)
2004
2005        # setup line finder
2006        fl = linefinder()
2007        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2008
2009        if not insitu:
2010            workscan = self.copy()
2011        else:
2012            workscan = self
2013
2014        fl.set_scan(workscan)
2015
2016        rows = range(workscan.nrow())
2017        # Save parameters of baseline fits & masklists as a class attribute.
2018        # NOTICE: It does not reflect changes in scantable!
2019        if len(rows) > 0:
2020            self.blpars=[]
2021            self.masklists=[]
2022        asaplog.push("Processing:")
2023        for r in rows:
2024            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2025                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2026                 workscan.getpol(r), workscan.getcycle(r))
2027            asaplog.push(msg, False)
2028
2029            # figure out edge parameter
2030            if individualedge:
2031                if len(edge) <= workscan.getif(r):
2032                    raise RuntimeError, "Number of edge elements appears to " \
2033                                        "be less than the number of IFs"
2034                curedge = edge[workscan.getif(r)]
2035
2036            # take into account flagtra info (CAS-1434)
2037            flagtra = workscan._getmask(r)
2038            actualmask = mask[:]
2039            if len(actualmask) == 0:
2040                actualmask = list(flagtra[:])
2041            else:
2042                if len(actualmask) != len(flagtra):
2043                    raise RuntimeError, "Mask and flagtra have different length"
2044                else:
2045                    for i in range(0, len(actualmask)):
2046                        actualmask[i] = actualmask[i] and flagtra[i]
2047
2048            # setup line finder
2049            fl.find_lines(r, actualmask, curedge)
2050            outmask=fl.get_mask()
2051            f.set_scan(workscan, fl.get_mask())
2052            f.x = workscan._getabcissa(r)
2053            f.y = workscan._getspectrum(r)
2054            f.data = None
2055            f.fit()
2056
2057            # Show mask list
2058            masklist=workscan.get_masklist(fl.get_mask(),row=r)
2059            msg = "mask range: "+str(masklist)
2060            asaplog.push(msg, False)
2061
2062            if plot:
2063                f.plot(residual=True)
2064                x = raw_input("Accept fit ( [y]/n ): ")
2065                if x.upper() == 'N':
2066                    self.blpars.append(None)
2067                    self.masklists.append(None)
2068                    continue
2069
2070            workscan._setspectrum(f.fitter.getresidual(), r)
2071            self.blpars.append(f.get_parameters())
2072            self.masklists.append(masklist)
2073        if plot:
2074            f._p.unmap()
2075            f._p = None
2076        workscan._add_history("auto_poly_baseline", varlist)
2077        if insitu:
2078            self._assign(workscan)
2079        else:
2080            return workscan
2081
2082    @asaplog_post_dec
2083    def rotate_linpolphase(self, angle):
2084        """\
2085        Rotate the phase of the complex polarization O=Q+iU correlation.
2086        This is always done in situ in the raw data.  So if you call this
2087        function more than once then each call rotates the phase further.
2088
2089        Parameters:
2090
2091            angle:   The angle (degrees) to rotate (add) by.
2092
2093        Example::
2094
2095            scan.rotate_linpolphase(2.3)
2096
2097        """
2098        varlist = vars()
2099        self._math._rotate_linpolphase(self, angle)
2100        self._add_history("rotate_linpolphase", varlist)
2101        return
2102
2103    @asaplog_post_dec
2104    def rotate_xyphase(self, angle):
2105        """\
2106        Rotate the phase of the XY correlation.  This is always done in situ
2107        in the data.  So if you call this function more than once
2108        then each call rotates the phase further.
2109
2110        Parameters:
2111
2112            angle:   The angle (degrees) to rotate (add) by.
2113
2114        Example::
2115
2116            scan.rotate_xyphase(2.3)
2117
2118        """
2119        varlist = vars()
2120        self._math._rotate_xyphase(self, angle)
2121        self._add_history("rotate_xyphase", varlist)
2122        return
2123
2124    @asaplog_post_dec
2125    def swap_linears(self):
2126        """\
2127        Swap the linear polarisations XX and YY, or better the first two
2128        polarisations, as this also works for circulars.
2129        """
2130        varlist = vars()
2131        self._math._swap_linears(self)
2132        self._add_history("swap_linears", varlist)
2133        return
2134
2135    @asaplog_post_dec
2136    def invert_phase(self):
2137        """\
2138        Invert the phase of the complex polarisation
2139        """
2140        varlist = vars()
2141        self._math._invert_phase(self)
2142        self._add_history("invert_phase", varlist)
2143        return
2144
2145    @asaplog_post_dec
2146    def add(self, offset, insitu=None):
2147        """\
2148        Return a scan where all spectra have the offset added
2149
2150        Parameters:
2151
2152            offset:      the offset
2153
2154            insitu:      if False a new scantable is returned.
2155                         Otherwise, the scaling is done in-situ
2156                         The default is taken from .asaprc (False)
2157
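        Example (0.5 is an arbitrary placeholder offset)::

            # add a constant offset to every spectrum
            shifted = scan.add(0.5, insitu=False)
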
2158        """
2159        if insitu is None: insitu = rcParams['insitu']
2160        self._math._setinsitu(insitu)
2161        varlist = vars()
2162        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2163        s._add_history("add", varlist)
2164        if insitu:
2165            self._assign(s)
2166        else:
2167            return s
2168
2169    @asaplog_post_dec
2170    def scale(self, factor, tsys=True, insitu=None):
2171        """\
2172
2173        Return a scan where all spectra are scaled by the given 'factor'
2174
2175        Parameters:
2176
2177            factor:      the scaling factor (float or 1D float list)
2178
2179            insitu:      if False a new scantable is returned.
2180                         Otherwise, the scaling is done in-situ
2181                         The default is taken from .asaprc (False)
2182
2183            tsys:        if True (default) then apply the operation to Tsys
2184                         as well as the data
2185
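        Example (1.25 is an arbitrary placeholder factor)::

            # scale the spectra, but not Tsys, by a constant factor
            scaled = scan.scale(1.25, tsys=False, insitu=False)
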
2186        """
2187        if insitu is None: insitu = rcParams['insitu']
2188        self._math._setinsitu(insitu)
2189        varlist = vars()
2190        s = None
2191        import numpy
2192        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2193            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2194                from asapmath import _array2dOp
2195                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2196            else:
2197                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2198        else:
2199            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2200        s._add_history("scale", varlist)
2201        if insitu:
2202            self._assign(s)
2203        else:
2204            return s
2205
2206    def set_sourcetype(self, match, matchtype="pattern",
2207                       sourcetype="reference"):
2208        """\
2209        Set the type of the source to be a source or reference scan
2210        using the provided pattern.
2211
2212        Parameters:
2213
2214            match:          a Unix style pattern, regular expression or selector
2215
2216            matchtype:      'pattern' (default) UNIX style pattern or
2217                            'regex' regular expression
2218
2219            sourcetype:     the type of the source to use (source/reference)
2220
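        Example (a sketch; assumes reference scans follow the trailing '_R'
        naming convention)::

            # mark every scan whose source name ends in '_R' as a reference
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
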
2221        """
2222        varlist = vars()
2223        basesel = self.get_selection()
2224        stype = -1
2225        if sourcetype.lower().startswith("r"):
2226            stype = 1
2227        elif sourcetype.lower().startswith("s"):
2228            stype = 0
2229        else:
2230            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2231        if matchtype.lower().startswith("p"):
2232            matchtype = "pattern"
2233        elif matchtype.lower().startswith("r"):
2234            matchtype = "regex"
2235        else:
2236            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2237        sel = selector()
2238        if isinstance(match, selector):
2239            sel = match
2240        else:
2241            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2242        self.set_selection(basesel+sel)
2243        self._setsourcetype(stype)
2244        self.set_selection(basesel)
2245        self._add_history("set_sourcetype", varlist)
2246
2247    @asaplog_post_dec
2248    @preserve_selection
2249    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2250        """\
2251        This function allows you to build quotients automatically.
2252        It assumes the observation has the same number of
2253        "ons" and "offs".
2254
2255        Parameters:
2256
2257            preserve:       you can preserve (default) the continuum or
2258                            remove it.  The equations used are
2259
2260                            preserve: Output = Toff * (on/off) - Toff
2261
2262                            remove:   Output = Toff * (on/off) - Ton
2263
2264            mode:           the on/off detection mode
2265                            'paired' (default)
2266                            identifies 'off' scans by the
2267                            trailing '_R' (Mopra/Parkes) or
2268                            '_e'/'_w' (Tid) and matches
2269                            on/off pairs from the observing pattern
2270                            'time'
2271                            finds the closest off in time
2272
2273        .. todo:: verify argument is not implemented
2274
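        Example (illustrative)::

            # form continuum-preserving quotients from matched on/off pairs
            q = scan.auto_quotient(preserve=True, mode='paired')
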
2275        """
2276        varlist = vars()
2277        modes = ["time", "paired"]
2278        if not mode in modes:
2279            msg = "please provide valid mode. Valid modes are %s" % (modes)
2280            raise ValueError(msg)
2281        s = None
2282        if mode.lower() == "paired":
2283            sel = self.get_selection()
2284            sel.set_query("SRCTYPE==psoff")
2285            self.set_selection(sel)
2286            offs = self.copy()
2287            sel.set_query("SRCTYPE==pson")
2288            self.set_selection(sel)
2289            ons = self.copy()
2290            s = scantable(self._math._quotient(ons, offs, preserve))
2291        elif mode.lower() == "time":
2292            s = scantable(self._math._auto_quotient(self, mode, preserve))
2293        s._add_history("auto_quotient", varlist)
2294        return s
2295
2296    @asaplog_post_dec
2297    def mx_quotient(self, mask = None, weight='median', preserve=True):
2298        """\
2299        Form a quotient using "off" beams when observing in "MX" mode.
2300
2301        Parameters:
2302
2303            mask:           an optional mask to be used when weight == 'stddev'
2304
2305            weight:         How to average the off beams.  Default is 'median'.
2306
2307            preserve:       you can preserve (default) the continuum or
2308                            remove it.  The equations used are:
2309
2310                                preserve: Output = Toff * (on/off) - Toff
2311
2312                                remove:   Output = Toff * (on/off) - Ton
2313
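        Example (illustrative)::

            # median-average the off beams and form the quotient
            q = scan.mx_quotient(weight='median', preserve=True)
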
2314        """
2315        mask = mask or ()
2316        varlist = vars()
2317        on = scantable(self._math._mx_extract(self, 'on'))
2318        preoff = scantable(self._math._mx_extract(self, 'off'))
2319        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2320        from asapmath  import quotient
2321        q = quotient(on, off, preserve)
2322        q._add_history("mx_quotient", varlist)
2323        return q
2324
2325    @asaplog_post_dec
2326    def freq_switch(self, insitu=None):
2327        """\
2328        Apply frequency switching to the data.
2329
2330        Parameters:
2331
2332            insitu:      if False a new scantable is returned.
2333                         Otherwise, the switching is done in-situ
2334                         The default is taken from .asaprc (False)
2335
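        Example (illustrative)::

            # apply frequency switching, returning a new scantable
            fs = scan.freq_switch(insitu=False)
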
2336        """
2337        if insitu is None: insitu = rcParams['insitu']
2338        self._math._setinsitu(insitu)
2339        varlist = vars()
2340        s = scantable(self._math._freqswitch(self))
2341        s._add_history("freq_switch", varlist)
2342        if insitu:
2343            self._assign(s)
2344        else:
2345            return s
2346
2347    @asaplog_post_dec
2348    def recalc_azel(self):
2349        """Recalculate the azimuth and elevation for each position."""
2350        varlist = vars()
2351        self._recalcazel()
2352        self._add_history("recalc_azel", varlist)
2353        return
2354
2355    @asaplog_post_dec
2356    def __add__(self, other):
2357        varlist = vars()
2358        s = None
2359        if isinstance(other, scantable):
2360            s = scantable(self._math._binaryop(self, other, "ADD"))
2361        elif isinstance(other, float):
2362            s = scantable(self._math._unaryop(self, other, "ADD", False))
2363        else:
2364            raise TypeError("Other input is not a scantable or float value")
2365        s._add_history("operator +", varlist)
2366        return s
2367
2368    @asaplog_post_dec
2369    def __sub__(self, other):
2370        """
2371        implicit on all axes and on Tsys
2372        """
2373        varlist = vars()
2374        s = None
2375        if isinstance(other, scantable):
2376            s = scantable(self._math._binaryop(self, other, "SUB"))
2377        elif isinstance(other, float):
2378            s = scantable(self._math._unaryop(self, other, "SUB", False))
2379        else:
2380            raise TypeError("Other input is not a scantable or float value")
2381        s._add_history("operator -", varlist)
2382        return s
2383
2384    @asaplog_post_dec
2385    def __mul__(self, other):
2386        """
2387        implicit on all axes and on Tsys
2388        """
2389        varlist = vars()
2390        s = None
2391        if isinstance(other, scantable):
2392            s = scantable(self._math._binaryop(self, other, "MUL"))
2393        elif isinstance(other, float):
2394            s = scantable(self._math._unaryop(self, other, "MUL", False))
2395        else:
2396            raise TypeError("Other input is not a scantable or float value")
2397        s._add_history("operator *", varlist)
2398        return s
2399
2400
2401    @asaplog_post_dec
2402    def __div__(self, other):
2403        """
2404        implicit on all axes and on Tsys
2405        """
2406        varlist = vars()
2407        s = None
2408        if isinstance(other, scantable):
2409            s = scantable(self._math._binaryop(self, other, "DIV"))
2410        elif isinstance(other, float):
2411            if other == 0.0:
2412                raise ZeroDivisionError("Dividing by zero is not recommended")
2413            s = scantable(self._math._unaryop(self, other, "DIV", False))
2414        else:
2415            raise TypeError("Other input is not a scantable or float value")
2416        s._add_history("operator /", varlist)
2417        return s
2418
2419    @asaplog_post_dec
2420    def get_fit(self, row=0):
2421        """\
2422        Print or return the stored fits for a row in the scantable
2423
2424        Parameters:
2425
2426            row:    the row which the fit has been applied to.
2427
2428        """
2429        if row >= self.nrow():
2430            return
2431        from asap.asapfit import asapfit
2432        fit = asapfit(self._getfit(row))
2433        asaplog.push( '%s' %(fit) )
2434        return fit.as_dict()
2435
2436    def flag_nans(self):
2437        """\
2438        Utility function to flag NaN values in the scantable.
2439        """
2440        import numpy
2441        basesel = self.get_selection()
2442        for i in range(self.nrow()):
2443            sel = self.get_row_selector(i)
2444            self.set_selection(basesel+sel)
2445            nans = numpy.isnan(self._getspectrum(0))
2446            if numpy.any(nans):
2447                bnans = [ bool(v) for v in nans]
2448                self.flag(bnans)
2449        self.set_selection(basesel)
2450
2451    def get_row_selector(self, rowno):
2452        return selector(beams=self.getbeam(rowno),
2453                        ifs=self.getif(rowno),
2454                        pols=self.getpol(rowno),
2455                        scans=self.getscan(rowno),
2456                        cycles=self.getcycle(rowno))
2457
2458    def _add_history(self, funcname, parameters):
2459        if not rcParams['scantable.history']:
2460            return
2461        # create date
2462        sep = "##"
2463        from datetime import datetime
2464        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2465        hist = dstr+sep
2466        hist += funcname+sep#cdate+sep
2467        if parameters.has_key('self'): del parameters['self']
2468        for k, v in parameters.iteritems():
2469            if type(v) is dict:
2470                for k2, v2 in v.iteritems():
2471                    hist += k2
2472                    hist += "="
2473                    if isinstance(v2, scantable):
2474                        hist += 'scantable'
2475                    elif k2 == 'mask':
2476                        if isinstance(v2, list) or isinstance(v2, tuple):
2477                            hist += str(self._zip_mask(v2))
2478                        else:
2479                            hist += str(v2)
2480                    else:
2481                        hist += str(v2)
2482            else:
2483                hist += k
2484                hist += "="
2485                if isinstance(v, scantable):
2486                    hist += 'scantable'
2487                elif k == 'mask':
2488                    if isinstance(v, list) or isinstance(v, tuple):
2489                        hist += str(self._zip_mask(v))
2490                    else:
2491                        hist += str(v)
2492                else:
2493                    hist += str(v)
2494            hist += sep
2495        hist = hist[:-2] # remove trailing '##'
2496        self._addhistory(hist)
2497
2498
2499    def _zip_mask(self, mask):
2500        mask = list(mask)
2501        i = 0
2502        segments = []
2503        while mask[i:].count(1):
2504            i += mask[i:].index(1)
2505            if mask[i:].count(0):
2506                j = i + mask[i:].index(0)
2507            else:
2508                j = len(mask)
2509            segments.append([i, j])
2510            i = j
2511        return segments
2512
2513    def _get_ordinate_label(self):
2514        fu = "("+self.get_fluxunit()+")"
2515        import re
2516        lbl = "Intensity"
2517        if re.match(".K.", fu):
2518            lbl = "Brightness Temperature "+ fu
2519        elif re.match(".Jy.", fu):
2520            lbl = "Flux density "+ fu
2521        return lbl
2522
2523    def _check_ifs(self):
2524        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2525        nchans = filter(lambda t: t > 0, nchans)
2526        return (sum(nchans)/len(nchans) == nchans[0])
2527
2528    @asaplog_post_dec
2529    def _fill(self, names, unit, average, getpt, antenna):
2530        first = True
2531        fullnames = []
2532        for name in names:
2533            name = os.path.expandvars(name)
2534            name = os.path.expanduser(name)
2535            if not os.path.exists(name):
2536                msg = "File '%s' does not exist" % (name)
2537                raise IOError(msg)
2538            fullnames.append(name)
2539        if average:
2540            asaplog.push('Auto averaging integrations')
2541        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2542        for name in fullnames:
2543            tbl = Scantable(stype)
2544            r = filler(tbl)
2545            rx = rcParams['scantable.reference']
2546            r.setreferenceexpr(rx)
2547            msg = "Importing %s..." % (name)
2548            asaplog.push(msg, False)
2549            r.open(name)# antenna, -1, -1, getpt)
2550            r.fill()
2551            if average:
2552                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2553            if not first:
2554                tbl = self._math._merge([self, tbl])
2555            Scantable.__init__(self, tbl)
2556            r.close()
2557            del r, tbl
2558            first = False
2559            #flush log
2560        asaplog.post()
2561        if unit is not None:
2562            self.set_fluxunit(unit)
2563        if not is_casapy():
2564            self.set_freqframe(rcParams['scantable.freqframe'])
2565
2566    def __getitem__(self, key):
2567        if key < 0:
2568            key += self.nrow()
2569        if key >= self.nrow():
2570            raise IndexError("Row index out of range.")
2571        return self._getspectrum(key)
2572
2573    def __setitem__(self, key, value):
2574        if key < 0:
2575            key += self.nrow()
2576        if key >= self.nrow():
2577            raise IndexError("Row index out of range.")
2578        if not hasattr(value, "__len__") or \
2579                len(value) > self.nchan(self.getif(key)):
2580            raise ValueError("Spectrum length doesn't match.")
2581        return self._setspectrum(value, key)
2582
2583    def __len__(self):
2584        return self.nrow()
2585
2586    def __iter__(self):
2587        for i in range(len(self)):
2588            yield self[i]