source: trunk/python/scantable.py @ 1893

Last change on this file since 1893 was 1893, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No


Bug fix on scantable constructor.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 86.7 KB
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18
19
20def preserve_selection(func):
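    """Decorator that saves the scantable's current selection, runs the
    wrapped method, and restores the original selection afterwards, even if
    the call raises an exception."""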
21    @wraps_dec(func)
22    def wrap(obj, *args, **kw):
23        basesel = obj.get_selection()
24        try:
25            val = func(obj, *args, **kw)
26        finally:
27            obj.set_selection(basesel)
28        return val
29    return wrap
30
31def is_scantable(filename):
32    """Is the given file a scantable?
33
34    Parameters:
35
36        filename: the name of the file/directory to test
37
38    """
39    if ( os.path.isdir(filename)
40         and os.path.exists(filename+'/table.info')
41         and os.path.exists(filename+'/table.dat') ):
42        f=open(filename+'/table.info')
43        l=f.readline()
44        f.close()
45        #if ( l.find('Scantable') != -1 ):
46        if ( l.find('Measurement Set') == -1 ):
47            return True
48        else:
49            return False
50    else:
51        return False
52##     return (os.path.isdir(filename)
53##             and not os.path.exists(filename+'/table.f1')
54##             and os.path.exists(filename+'/table.info'))
55
56def is_ms(filename):
57    """Is the given file a MeasurementSet?
58
59    Parameters:
60
61        filename: the name of the file/directory to test
62
63    """
64    if ( os.path.isdir(filename)
65         and os.path.exists(filename+'/table.info')
66         and os.path.exists(filename+'/table.dat') ):
67        f=open(filename+'/table.info')
68        l=f.readline()
69        f.close()
70        if ( l.find('Measurement Set') != -1 ):
71            return True
72        else:
73            return False
74    else:
75        return False
76   
77class scantable(Scantable):
78    """\
79        The ASAP container for scans (single-dish data).
80    """
81
82    @asaplog_post_dec
83    def __init__(self, filename, average=None, unit=None, getpt=None,
84                 antenna=None, parallactify=None):
85        """\
86        Create a scantable from a saved one or make a reference
87
88        Parameters:
89
90            filename:     the name of an asap table on disk
91                          or
92                          the name of a rpfits/sdfits/ms file
93                          (integrations within scans are auto averaged
94                          and the whole file is read) or
95                          [advanced] a reference to an existing scantable
96
97            average:      average all integrations within a scan on read.
98                          The default (True) is taken from .asaprc.
99
100            unit:         brightness unit; must be consistent with K or Jy.
101                          Over-rides the default selected by the filler
102                          (input rpfits/sdfits/ms) or replaces the value
103                          in existing scantables
104
105            getpt:        for MeasurementSet input data only:
106                          If True, all pointing data are filled.
107                          The default is True. Setting this to False can
108                          make loading the MS data faster in some cases.
109
110            antenna:      Antenna selection. integer (id) or string (name or id).
111
112            parallactify: Indicate that the data had been parallactified. Default
113                          is taken from rc file.
114
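        Example (an illustrative sketch; the file names are placeholders for
        existing data)::

            # fill from an rpfits/sdfits/ms file, averaging integrations
            scan = scantable('mydata.rpf', average=True)
            # open a scantable previously saved in ASAP format
            scan2 = scantable('mydata.asap')
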
115        """
116        if average is None:
117            average = rcParams['scantable.autoaverage']
118        if getpt is None:
119            getpt = True
120        if antenna is not None:
121            asaplog.push("Antenna selection currently unsupported. "
122                         "Using ''")
123            asaplog.post('WARN')
124        if antenna is None:
125            antenna = ''
126        elif type(antenna) == int:
127            antenna = '%s' % antenna
128        elif type(antenna) == list:
129            tmpstr = ''
130            for i in range( len(antenna) ):
131                if type(antenna[i]) == int:
132                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
133                elif type(antenna[i]) == str:
134                    tmpstr=tmpstr+antenna[i]+','
135                else:
136                    raise TypeError('Bad antenna selection.')
137            antenna = tmpstr.rstrip(',')
138        parallactify = parallactify or rcParams['scantable.parallactify']
139        varlist = vars()
140        from asap._asap import stmath
141        self._math = stmath( rcParams['insitu'] )
142        if isinstance(filename, Scantable):
143            Scantable.__init__(self, filename)
144        else:
145            if isinstance(filename, str):
146                filename = os.path.expandvars(filename)
147                filename = os.path.expanduser(filename)
148                if not os.path.exists(filename):
149                    s = "File '%s' not found." % (filename)
150                    raise IOError(s)
151                if is_scantable(filename):
152                    ondisk = rcParams['scantable.storage'] == 'disk'
153                    Scantable.__init__(self, filename, ondisk)
154                    if unit is not None:
155                        self.set_fluxunit(unit)
156                    # do not reset to the default freqframe
157                    #self.set_freqframe(rcParams['scantable.freqframe'])
158                #elif os.path.isdir(filename) \
159                #         and not os.path.exists(filename+'/table.f1'):
160                elif is_ms(filename):
161                    self._fill([filename], unit, average, getpt, antenna)
162                elif os.path.isfile(filename):
163                    self._fill([filename], unit, average, getpt, antenna)
164                else:
165                    msg = "The given file '%s' is not a valid " \
166                          "asap table." % (filename)
167                    raise IOError(msg)
168            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
169                  and isinstance(filename[-1], str):
170                self._fill(filename, unit, average, getpt, antenna)
171        self.parallactify(parallactify)
172        self._add_history("scantable", varlist)
173
174    @asaplog_post_dec
175    def save(self, name=None, format=None, overwrite=False):
176        """\
177        Store the scantable on disk. This can be an asap (aips++) Table,
178        SDFITS or MS2 format.
179
180        Parameters:
181
182            name:        the name of the output file. For format "ASCII"
183                         this is the root file name (data in 'name'.txt
184                         and header in 'name'_header.txt)
185
186            format:      an optional file format. Default is ASAP.
187                         Allowed are:
188
189                            * 'ASAP' (save as ASAP [aips++] Table),
190                            * 'SDFITS' (save as SDFITS file)
191                            * 'ASCII' (saves as ascii text file)
192                            * 'MS2' (saves as a casacore MeasurementSet V2)
193                            * 'FITS' (save as image FITS - not readable by class)
194                            * 'CLASS' (save as FITS readable by CLASS)
195
196            overwrite:   If True, overwrite the file if it exists.
197                         The default (False) is to abort with an error
198                         instead of writing the output. USE WITH CARE.
199
200        Example::
201
202            scan.save('myscan.asap')
203            scan.save('myscan.sdfits', 'SDFITS')
204
205        """
206        from os import path
207        format = format or rcParams['scantable.save']
208        suffix = '.'+format.lower()
209        if name is None or name == "":
210            name = 'scantable'+suffix
211            msg = "No filename given. Using default name %s..." % name
212            asaplog.push(msg)
213        name = path.expandvars(name)
214        if path.isfile(name) or path.isdir(name):
215            if not overwrite:
216                msg = "File %s exists." % name
217                raise IOError(msg)
218        format2 = format.upper()
219        if format2 == 'ASAP':
220            self._save(name)
221        else:
222            from asap._asap import stwriter as stw
223            writer = stw(format2)
224            writer.write(self, name)
225        return
226
227    def copy(self):
228        """Return a copy of this scantable.
229
230        *Note*:
231
232            This makes a full (deep) copy. scan2 = scan1 makes a reference.
233
234        Example::
235
236            copiedscan = scan.copy()
237
238        """
239        sd = scantable(Scantable._copy(self))
240        return sd
241
242    def drop_scan(self, scanid=None):
243        """\
244        Return a new scantable where the specified scan number(s) has(have)
245        been dropped.
246
247        Parameters:
248
249            scanid:    a (list of) scan number(s)
250
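        Example (illustrative; assumes scans 0 and 2 exist in the data)::

            # return a new scantable without scans 0 and 2
            newscan = scan.drop_scan([0, 2])
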
251        """
252        from asap import _is_sequence_or_number as _is_valid
253        from asap import _to_list
254        from asap import unique
255        if not _is_valid(scanid):
256            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
257        scanid = _to_list(scanid)
258        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
259        for sid in scanid: allscans.remove(sid)
260        if len(allscans) == 0:
261            raise ValueError("Can't remove all scans")
262        sel = selector(scans=allscans)
263        return self._select_copy(sel)
264
265    def _select_copy(self, selection):
266        orig = self.get_selection()
267        self.set_selection(orig+selection)
268        cp = self.copy()
269        self.set_selection(orig)
270        return cp
271
272    def get_scan(self, scanid=None):
273        """\
274        Return a specific scan (by scanno) or collection of scans (by
275        source name) in a new scantable.
276
277        *Note*:
278
279            See scantable.drop_scan() for the inverse operation.
280
281        Parameters:
282
283            scanid:    a (list of) scanno or a source name, unix-style
284                       patterns are accepted for source name matching, e.g.
285                       '*_R' gets all 'ref' scans
286
287        Example::
288
289            # get all scans containing the source '323p459'
290            newscan = scan.get_scan('323p459')
291            # get all 'off' scans
292            refscans = scan.get_scan('*_R')
293            # get a subset of scans by scanno (as listed in scan.summary())
294            newscan = scan.get_scan([0, 2, 7, 10])
295
296        """
297        if scanid is None:
298            raise RuntimeError( 'Please specify a scan no or name to '
299                                'retrieve from the scantable' )
300        try:
301            bsel = self.get_selection()
302            sel = selector()
303            if type(scanid) is str:
304                sel.set_name(scanid)
305                return self._select_copy(sel)
306            elif type(scanid) is int:
307                sel.set_scans([scanid])
308                return self._select_copy(sel)
309            elif type(scanid) is list:
310                sel.set_scans(scanid)
311                return self._select_copy(sel)
312            else:
313                msg = "Illegal scanid type, use 'int' or 'list' of ints."
314                raise TypeError(msg)
315        except RuntimeError:
316            raise
317
318    def __str__(self):
319        return Scantable._summary(self, True)
320
321    def summary(self, filename=None):
322        """\
323        Print a summary of the contents of this scantable.
324
325        Parameters:
326
327            filename:    the name of a file to write the output to
328                         Default - no file output
329
330        """
331        info = Scantable._summary(self, True)
332        if filename is not None:
333            if filename == "":
334                filename = 'scantable_summary.txt'
335            from os.path import expandvars, isdir
336            filename = expandvars(filename)
337            if not isdir(filename):
338                data = open(filename, 'w')
339                data.write(info)
340                data.close()
341            else:
342                msg = "Illegal file name '%s'." % (filename)
343                raise IOError(msg)
344        return page(info)
345
346    def get_spectrum(self, rowno):
347        """Return the spectrum for the given row in the scantable as a list.
348
349        Parameters:
350
351             rowno:   the row number to retrieve the spectrum from
352
353        """
354        return self._getspectrum(rowno)
355
356    def get_mask(self, rowno):
357        """Return the mask for the given row in the scantable as a list.
358
359        Parameters:
360
361             rowno:   the row number to retrieve the mask from
362
363        """
364        return self._getmask(rowno)
365
366    def set_spectrum(self, spec, rowno):
367        """Set the spectrum for the given row in the scantable.
368
369        Parameters:
370
371             spec:   the new spectrum
372
373             rowno:  the row number to set the spectrum for
374
375        """
376        assert(len(spec) == self.nchan())
377        return self._setspectrum(spec, rowno)
378
379    def get_coordinate(self, rowno):
380        """Return the (spectral) coordinate for a given 'rowno'.
381
382        *Note*:
383
384            * This coordinate is only valid until a scantable method modifies
385              the frequency axis.
386            * This coordinate contains the original frequency set-up,
387              NOT the new frame. The conversions, however, are done using the
388              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
389              use scantable.freq_align first. Without it there is no closure,
390              i.e.::
391
392                  c = myscan.get_coordinate(0)
393                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
394
395        Parameters:
396
397             rowno:    the row number for the spectral coordinate
398
399        """
400        return coordinate(Scantable.get_coordinate(self, rowno))
401
402    def get_selection(self):
403        """\
404        Get the selection object currently set on this scantable.
405
406        Example::
407
408            sel = scan.get_selection()
409            sel.set_ifs(0)              # select IF 0
410            scan.set_selection(sel)     # apply modified selection
411
412        """
413        return selector(self._getselection())
414
415    def set_selection(self, selection=None, **kw):
416        """\
417        Select a subset of the data. All following operations on this scantable
418        are only applied to this selection.
419
420        Parameters:
421
422            selection:    a selector object (default unset the selection), or
423                          any combination of "pols", "ifs", "beams", "scans",
424                          "cycles", "name", "query"
425
426        Examples::
427
428            sel = selector()         # create a selection object
429            sel.set_scans([0, 3])     # select SCANNO 0 and 3
430            scan.set_selection(sel)  # set the selection
431            scan.summary()           # will only print summary of scanno 0 and 3
432            scan.set_selection()     # unset the selection
433            # or the equivalent
434            scan.set_selection(scans=[0,3])
435            scan.summary()           # will only print summary of scanno 0 and 3
436            scan.set_selection()     # unset the selection
437
438        """
439        if selection is None:
440            # reset
441            if len(kw) == 0:
442                selection = selector()
443            else:
444                # try keywords
445                for k in kw:
446                    if k not in selector.fields:
447                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
448                selection = selector(**kw)
449        self._setselection(selection)
450
451    def get_row(self, row=0, insitu=None):
452        """\
453        Select a row in the scantable.
454        Return a scantable with a single row.
455
456        Parameters:
457
458            row:    row no of integration, default is 0.
459            insitu: if False a new scantable is returned. Otherwise, the
460                    selection is done in-situ. The default is taken from .asaprc
461                    (False)
462
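        Example (illustrative)::

            # extract integration (row) number 5 as a new scantable
            onerow = scan.get_row(row=5, insitu=False)
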
463        """
464        if insitu is None: insitu = rcParams['insitu']
465        if not insitu:
466            workscan = self.copy()
467        else:
468            workscan = self
469        # Select a row
470        sel=selector()
471        sel.set_scans([workscan.getscan(row)])
472        sel.set_cycles([workscan.getcycle(row)])
473        sel.set_beams([workscan.getbeam(row)])
474        sel.set_ifs([workscan.getif(row)])
475        sel.set_polarisations([workscan.getpol(row)])
476        sel.set_name(workscan._getsourcename(row))
477        workscan.set_selection(sel)
478        if not workscan.nrow() == 1:
479            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
480            raise RuntimeError(msg)
481        del sel
482        if insitu:
483            self._assign(workscan)
484        else:
485            return workscan
486
487    @asaplog_post_dec
488    def stats(self, stat='stddev', mask=None, form='3.3f'):
489        """\
490        Determine the specified statistic of the current beam/if/pol.
491        Takes a 'mask' as an optional parameter to specify which
492        channels should be excluded.
493
494        Parameters:
495
496            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
497                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
498
499            mask:    an optional mask specifying where the statistic
500                     should be determined.
501
502            form:    format string to print statistic values
503
504        Example::
505
506            scan.set_unit('channel')
507            msk = scan.create_mask([100, 200], [500, 600])
508            scan.stats(stat='mean', mask=msk)
509
510        """
511        mask = mask or []
512        if not self._check_ifs():
513            raise ValueError("Cannot apply mask as the IFs have different "
514                             "number of channels. Please use set_selection() "
515                             "to select individual IFs")
516        rtnabc = False
517        if stat.lower().endswith('_abc'): rtnabc = True
518        getchan = False
519        if stat.lower().startswith('min') or stat.lower().startswith('max'):
520            chan = self._math._minmaxchan(self, mask, stat)
521            getchan = True
522            statvals = []
523        if not rtnabc: statvals = self._math._stats(self, mask, stat)
524
525        #def cb(i):
526        #    return statvals[i]
527
528        #return self._row_callback(cb, stat)
529
530        label=stat
531        #callback=cb
532        out = ""
533        #outvec = []
534        sep = '-'*50
535        for i in range(self.nrow()):
536            refstr = ''
537            statunit= ''
538            if getchan:
539                qx, qy = self.chan2data(rowno=i, chan=chan[i])
540                if rtnabc:
541                    statvals.append(qx['value'])
542                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
543                    statunit= '['+qx['unit']+']'
544                else:
545                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
546
547            tm = self._gettime(i)
548            src = self._getsourcename(i)
549            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
550            out += 'Time[%s]:\n' % (tm)
551            if self.nbeam(-1) > 1:
552                out +=  ' Beam[%d] ' % (self.getbeam(i))
553            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
554            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
555            #outvec.append(callback(i))
556            #out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
557            out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
558            out +=  sep+"\n"
559
560
561        import os
562        if os.environ.has_key( 'USER' ):
563            usr = os.environ['USER']
564        else:
565            import commands
566            usr = commands.getoutput( 'whoami' )
567        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
568        f = open(tmpfile,'w')
569        print >> f, sep
570        print >> f, ' %s %s' % (label, statunit)
571        print >> f, sep
572        print >> f, out
573        f.close()
574        f = open(tmpfile,'r')
575        x = f.readlines()
576        f.close()
577        asaplog.push(''.join(x), False)
578
579        return statvals
580
581    def chan2data(self, rowno=0, chan=0):
582        """\
583        Returns channel/frequency/velocity and spectral value
584        at an arbitrary row and channel in the scantable.
585
586        Parameters:
587
588            rowno:   a row number in the scantable. Default is the
589                     first row, i.e. rowno=0
590
591            chan:    a channel in the scantable. Default is the first
592                     channel, i.e. chan=0
593
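        Example (illustrative)::

            # abscissa and spectral value at channel 100 of row 0, each
            # returned as a {'unit': ..., 'value': ...} dictionary
            qx, qy = scan.chan2data(rowno=0, chan=100)
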
594        """
595        if isinstance(rowno, int) and isinstance(chan, int):
596            qx = {'unit': self.get_unit(),
597                  'value': self._getabcissa(rowno)[chan]}
598            qy = {'unit': self.get_fluxunit(),
599                  'value': self._getspectrum(rowno)[chan]}
600            return qx, qy
601
602    def stddev(self, mask=None):
603        """\
604        Determine the standard deviation of the current beam/if/pol.
605        Takes a 'mask' as an optional parameter to specify which
606        channels should be excluded.
607
608        Parameters:
609
610            mask:    an optional mask specifying where the standard
611                     deviation should be determined.
612
613        Example::
614
615            scan.set_unit('channel')
616            msk = scan.create_mask([100, 200], [500, 600])
617            scan.stddev(mask=msk)
618
619        """
620        return self.stats(stat='stddev', mask=mask);
621
622
623    def get_column_names(self):
624        """\
625        Return a list of column names, which can be used for selection.
626        """
627        return list(Scantable.get_column_names(self))
628
629    def get_tsys(self, row=-1):
630        """\
631        Return the System temperatures.
632
633        Parameters:
634
635            row:    the rowno to get the information for. (default all rows)
636
637        Returns:
638
639            a list of Tsys values for the current selection
640
641        """
642        if row > -1:
643            return self._get_column(self._gettsys, row)
644        return self._row_callback(self._gettsys, "Tsys")
645
646
647    def get_weather(self, row=-1):
648        """\
649        Return the weather information.
650
651        Parameters:
652
653            row:    the rowno to get the information for. (default all rows)
654
655        Returns:
656
657            a dict or a list of dicts of values for the current selection
658
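        Example (illustrative)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['windspeed']
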
659        """
660
661        values = self._get_column(self._get_weather, row)
662        if row > -1:
663            return {'temperature': values[0],
664                    'pressure': values[1], 'humidity' : values[2],
665                    'windspeed' : values[3], 'windaz' : values[4]
666                    }
667        else:
668            out = []
669            for r in values:
670
671                out.append({'temperature': r[0],
672                            'pressure': r[1], 'humidity' : r[2],
673                            'windspeed' : r[3], 'windaz' : r[4]
674                    })
675            return out
676
677    def _row_callback(self, callback, label):
678        out = ""
679        outvec = []
680        sep = '-'*50
681        for i in range(self.nrow()):
682            tm = self._gettime(i)
683            src = self._getsourcename(i)
684            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
685            out += 'Time[%s]:\n' % (tm)
686            if self.nbeam(-1) > 1:
687                out +=  ' Beam[%d] ' % (self.getbeam(i))
688            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
689            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
690            outvec.append(callback(i))
691            out += '= %3.3f\n' % (outvec[i])
692            out +=  sep+'\n'
693
694        asaplog.push(sep)
695        asaplog.push(" %s" % (label))
696        asaplog.push(sep)
697        asaplog.push(out)
698        asaplog.post()
699        return outvec
700
701    def _get_column(self, callback, row=-1):
702        """
703        """
704        if row == -1:
705            return [callback(i) for i in range(self.nrow())]
706        else:
707            if  0 <= row < self.nrow():
708                return callback(row)
709
710
711    def get_time(self, row=-1, asdatetime=False):
712        """\
713        Get a list of time stamps for the observations.
714        Return a datetime object for each integration time stamp in the scantable.
715
716        Parameters:
717
718            row:          row no of integration. Default -1 return all rows
719
720            asdatetime:   return values as datetime objects rather than strings
721
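        Example (illustrative)::

            # time stamps of all integrations as datetime objects
            times = scan.get_time(asdatetime=True)
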
722        """
723        from time import strptime
724        from datetime import datetime
725        times = self._get_column(self._gettime, row)
726        if not asdatetime:
727            return times
728        format = "%Y/%m/%d/%H:%M:%S"
729        if isinstance(times, list):
730            return [datetime(*strptime(i, format)[:6]) for i in times]
731        else:
732            return datetime(*strptime(times, format)[:6])
733
734
735    def get_inttime(self, row=-1):
736        """\
737        Get a list of integration times for the observations.
738        Return a time in seconds for each integration in the scantable.
739
740        Parameters:
741
742            row:    row no of integration. Default -1 return all rows.
743
744        """
745        return self._get_column(self._getinttime, row)
746
747
748    def get_sourcename(self, row=-1):
749        """\
750        Get a list of source names for the observations.
751        Return a string for each integration in the scantable.
752        Parameters:
753
754            row:    row no of integration. Default -1 return all rows.
755
756        """
757        return self._get_column(self._getsourcename, row)
758
759    def get_elevation(self, row=-1):
760        """\
761        Get a list of elevations for the observations.
762        Return a float for each integration in the scantable.
763
764        Parameters:
765
766            row:    row no of integration. Default -1 return all rows.
767
768        """
769        return self._get_column(self._getelevation, row)
770
771    def get_azimuth(self, row=-1):
772        """\
773        Get a list of azimuths for the observations.
774        Return a float for each integration in the scantable.
775
776        Parameters:
777            row:    row no of integration. Default -1 return all rows.
778
779        """
780        return self._get_column(self._getazimuth, row)
781
782    def get_parangle(self, row=-1):
783        """\
784        Get a list of parallactic angles for the observations.
785        Return a float for each integration in the scantable.
786
787        Parameters:
788
789            row:    row no of integration. Default -1 return all rows.
790
791        """
792        return self._get_column(self._getparangle, row)
793
794    def get_direction(self, row=-1):
795        """
796        Get a list of Positions on the sky (direction) for the observations.
797        Return a string for each integration in the scantable.
798
799        Parameters:
800
801            row:    row no of integration. Default -1 return all rows
802
803        """
804        return self._get_column(self._getdirection, row)
805
806    def get_directionval(self, row=-1):
807        """\
808        Get a list of Positions on the sky (direction) for the observations.
809        Return a float for each integration in the scantable.
810
811        Parameters:
812
813            row:    row no of integration. Default -1 return all rows
814
815        """
816        return self._get_column(self._getdirectionvec, row)
817
818    @asaplog_post_dec
819    def set_unit(self, unit='channel'):
820        """\
821        Set the unit for all following operations on this scantable
822
823        Parameters:
824
825            unit:    optional unit, default is 'channel'. Use one of '*Hz',
826                     'km/s', 'channel' or equivalent ''
827
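        Example (illustrative)::

            scan.set_unit('km/s')     # all following operations use velocity
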
828        """
829        varlist = vars()
830        if unit in ['', 'pixel', 'channel']:
831            unit = ''
832        inf = list(self._getcoordinfo())
833        inf[0] = unit
834        self._setcoordinfo(inf)
835        self._add_history("set_unit", varlist)
836
837    @asaplog_post_dec
838    def set_instrument(self, instr):
839        """\
840        Set the instrument for subsequent processing.
841
842        Parameters:
843
844            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
845                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
846
847        """
848        self._setInstrument(instr)
849        self._add_history("set_instrument", vars())
850
851    @asaplog_post_dec
852    def set_feedtype(self, feedtype):
853        """\
854        Overwrite the feed type, which might not be set correctly.
855
856        Parameters:
857
858            feedtype:     'linear' or 'circular'
859
860        """
861        self._setfeedtype(feedtype)
862        self._add_history("set_feedtype", vars())
863
864    @asaplog_post_dec
865    def set_doppler(self, doppler='RADIO'):
866        """\
867        Set the doppler for all following operations on this scantable.
868
869        Parameters:
870
871            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
872
873        """
874        varlist = vars()
875        inf = list(self._getcoordinfo())
876        inf[2] = doppler
877        self._setcoordinfo(inf)
878        self._add_history("set_doppler", vars())
879
880    @asaplog_post_dec
881    def set_freqframe(self, frame=None):
882        """\
883        Set the frame type of the Spectral Axis.
884
885        Parameters:
886
887            frame:   an optional frame type, default 'LSRK'. Valid frames are:
888                     'TOPO', 'LSRD', 'LSRK', 'BARY',
889                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
890
891        Example::
892
893            scan.set_freqframe('BARY')
894
895        """
896        frame = frame or rcParams['scantable.freqframe']
897        varlist = vars()
898        # "REST" is not implemented in casacore
899        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
900        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
901        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
902                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
903
904        if frame in valid:
905            inf = list(self._getcoordinfo())
906            inf[1] = frame
907            self._setcoordinfo(inf)
908            self._add_history("set_freqframe", varlist)
909        else:
910            msg  = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
911            raise TypeError(msg)
912
913    @asaplog_post_dec
914    def set_dirframe(self, frame=""):
915        """\
916        Set the frame type of the Direction on the sky.
917
918        Parameters:
919
920            frame:   an optional frame type, default ''. Valid frames are:
921                     'J2000', 'B1950', 'GALACTIC'
922
923        Example::
924
925            scan.set_dirframe('GALACTIC')
926
927        """
928        varlist = vars()
929        Scantable.set_dirframe(self, frame)
930        self._add_history("set_dirframe", varlist)
931
932    def get_unit(self):
933        """\
934        Get the default unit set in this scantable
935
936        Returns:
937
938            A unit string
939
940        """
941        inf = self._getcoordinfo()
942        unit = inf[0]
943        if unit == '': unit = 'channel'
944        return unit
945
946    @asaplog_post_dec
947    def get_abcissa(self, rowno=0):
948        """\
949        Get the abcissa in the current coordinate setup for the currently
950        selected Beam/IF/Pol
951
952        Parameters:
953
954            rowno:    an optional row number in the scantable. Default is the
955                      first row, i.e. rowno=0
956
957        Returns:
958
959            The abcissa values and the format string (as a dictionary)
960
961        """
962        abc = self._getabcissa(rowno)
963        lbl = self._getabcissalabel(rowno)
964        return abc, lbl
965
966    @asaplog_post_dec
967    def flag(self, mask=None, unflag=False):
968        """\
969        Flag the selected data using an optional channel mask.
970
971        Parameters:
972
973            mask:   an optional channel mask, created with create_mask. Default
974                    (no mask) is all channels.
975
976            unflag:    if True, unflag the data
977
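        Example (an illustrative sketch, working in channel units)::

            scan.set_unit('channel')
            msk = scan.create_mask([0, 100])
            scan.flag(mask=msk)          # flag channels 0-100
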
978        """
979        varlist = vars()
980        mask = mask or []
981        self._flag(mask, unflag)
982        self._add_history("flag", varlist)
983
984    @asaplog_post_dec
985    def flag_row(self, rows=[], unflag=False):
986        """\
987        Flag the selected data in row-based manner.
988
989        Parameters:
990
991            rows:   list of row numbers to be flagged. Default is no row
992                    (must be explicitly specified to execute row-based flagging).
993
994            unflag: if True, unflag the data.
995
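        Example (illustrative)::

            scan.flag_row([0, 3])        # flag rows 0 and 3 entirely
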
996        """
997        varlist = vars()
998        self._flag_row(rows, unflag)
999        self._add_history("flag_row", varlist)
1000
1001    @asaplog_post_dec
1002    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1003        """\
1004        Flag the selected data outside a specified range (channel-based)
1005
1006        Parameters:
1007
1008            uthres:      upper threshold.
1009
1010            dthres:      lower threshold
1011
1012            clipoutside: True for flagging data outside the range [dthres:uthres].
1013                         False for flagging data inside the range.
1014
1015            unflag:      if True, unflag the data.
1016
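        Example (an illustrative sketch; the thresholds are arbitrary)::

            # flag all data points outside the range [-5.0, 5.0]
            scan.clip(uthres=5.0, dthres=-5.0, clipoutside=True)
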
1017        """
1018        varlist = vars()
1019        self._clip(uthres, dthres, clipoutside, unflag)
1020        self._add_history("clip", varlist)
1021
1022    @asaplog_post_dec
1023    def lag_flag(self, start, end, unit="MHz", insitu=None):
1024        """\
1025        Flag the data in 'lag' space by providing a frequency to remove.
1026        Flagged data in the scantable gets interpolated over the region.
1027        No taper is applied.
1028
1029        Parameters:
1030
1031            start:    the start frequency (really a period within the
1032                      bandwidth)  or period to remove
1033
1034            end:      the end frequency or period to remove
1035
1036            unit:     the frequency unit (default "MHz") or "" for
1037                      explicit lag channels
1038
1039        *Notes*:
1040
1041            It is recommended to flag edges of the band or strong
1042            signals beforehand.
1043
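        Example (an illustrative sketch; the frequency range is arbitrary)::

            # interpolate over a 2.5-3.5 MHz standing-wave ripple
            cleaned = scan.lag_flag(2.5, 3.5, unit="MHz", insitu=False)
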
1044        """
1045        if insitu is None: insitu = rcParams['insitu']
1046        self._math._setinsitu(insitu)
1047        varlist = vars()
1048        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1049        if not (unit == "" or base.has_key(unit)):
1050            raise ValueError("%s is not a valid unit." % unit)
1051        if unit == "":
1052            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1053        else:
1054            s = scantable(self._math._lag_flag(self, start*base[unit],
1055                                               end*base[unit], "frequency"))
1056        s._add_history("lag_flag", varlist)
1057        if insitu:
1058            self._assign(s)
1059        else:
1060            return s
1061
1062    @asaplog_post_dec
1063    def create_mask(self, *args, **kwargs):
1064        """\
1065        Compute and return a mask based on [min, max] windows.
1066        The specified windows are to be INCLUDED, when the mask is
1067        applied.
1068
1069        Parameters:
1070
1071            [min, max], [min2, max2], ...
1072                Pairs of start/end points (inclusive) specifying the regions
1073                to be masked
1074
1075            invert:     optional argument. If specified as True,
1076                        return an inverted mask, i.e. the regions
1077                        specified are EXCLUDED
1078
1079            row:        create the mask using the specified row for
1080                        unit conversions, default is row=0
1081                        only necessary if frequency varies over rows.
1082
1083        Examples::
1084
1085            scan.set_unit('channel')
1086            # a)
1087            msk = scan.create_mask([400, 500], [800, 900])
1088            # masks everything outside 400 and 500
1089            # and 800 and 900 in the unit 'channel'
1090
1091            # b)
1092            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1093            # masks the regions between 400 and 500
1094            # and 800 and 900 in the unit 'channel'
1095
1096            # c)
1097            #mask only channel 400
1098            msk =  scan.create_mask([400])
1099
1100        """
1101        row = kwargs.get("row", 0)
1102        data = self._getabcissa(row)
1103        u = self._getcoordinfo()[0]
1104        if u == "":
1105            u = "channel"
1106        msg = "The current mask window unit is %s" % u
1107        i = self._check_ifs()
1108        if not i:
1109            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1110        asaplog.push(msg)
1111        n = self.nchan()
1112        msk = _n_bools(n, False)
1113        # test if args is a 'list' or a 'normal *args - UGLY!!!
1114
1115        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1116             and args or args[0]
1117        for window in ws:
1118            if len(window) == 1:
1119                window = [window[0], window[0]]
1120            if len(window) == 0 or  len(window) > 2:
1121                raise ValueError("A window needs to be defined as [start(, end)]")
1122            if window[0] > window[1]:
1123                tmp = window[0]
1124                window[0] = window[1]
1125                window[1] = tmp
1126            for i in range(n):
1127                if data[i] >= window[0] and data[i] <= window[1]:
1128                    msk[i] = True
1129        if kwargs.has_key('invert'):
1130            if kwargs.get('invert'):
1131                msk = mask_not(msk)
1132        return msk
1133
1134    def get_masklist(self, mask=None, row=0):
1135        """\
1136        Compute and return a list of mask windows, [min, max].
1137
1138        Parameters:
1139
1140            mask:       channel mask, created with create_mask.
1141
1142            row:        calculate the masklist using the specified row
1143                        for unit conversions, default is row=0
1144                        only necessary if frequency varies over rows.
1145
1146        Returns:
1147
1148            [min, max], [min2, max2], ...
1149                Pairs of start/end points (inclusive) specifying
1150                the masked regions
1151
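        Example (illustrative)::

            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)   # [min, max] pairs in current unit
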
1152        """
1153        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1154            raise TypeError("The mask should be list or tuple.")
1155        if len(mask) < 2:
1156            raise TypeError("The mask should contain more than 1 element")
1157        if self.nchan() != len(mask):
1158            msg = "Number of channels in scantable != number of mask elements"
1159            raise TypeError(msg)
1160        data = self._getabcissa(row)
1161        u = self._getcoordinfo()[0]
1162        if u == "":
1163            u = "channel"
1164        msg = "The current mask window unit is %s" % u
1165        i = self._check_ifs()
1166        if not i:
1167            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1168        asaplog.push(msg)
1169        masklist=[]
1170        ist, ien = None, None
1171        ist, ien=self.get_mask_indices(mask)
1172        if ist is not None and ien is not None:
1173            for i in xrange(len(ist)):
1174                range=[data[ist[i]],data[ien[i]]]
1175                range.sort()
1176                masklist.append([range[0],range[1]])
1177        return masklist
1178
1179    def get_mask_indices(self, mask=None):
1180        """\
1181        Compute and Return lists of mask start indices and mask end indices.
1182
1183        Parameters:
1184
1185            mask:       channel mask, created with create_mask.
1186
1187        Returns:
1188
1189            List of mask start indices and that of mask end indices,
1190            i.e., [istart1,istart2,....], [iend1,iend2,....].
1191
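        Example (illustrative, assuming channel units)::

            msk = scan.create_mask([400, 500])
            istart, iend = scan.get_mask_indices(msk)   # e.g. [400], [500]
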
1192        """
1193        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1194            raise TypeError("The mask should be list or tuple.")
1195        if len(mask) < 2:
1196            raise TypeError("The mask should contain more than 1 element")
1197        istart=[]
1198        iend=[]
1199        if mask[0]: istart.append(0)
1200        for i in range(len(mask)-1):
1201            if not mask[i] and mask[i+1]:
1202                istart.append(i+1)
1203            elif mask[i] and not mask[i+1]:
1204                iend.append(i)
1205        if mask[len(mask)-1]: iend.append(len(mask)-1)
1206        if len(istart) != len(iend):
1207            raise RuntimeError("Numbers of mask start != mask end.")
1208        for i in range(len(istart)):
1209            if istart[i] > iend[i]:
1210                raise RuntimeError("Mask start index > mask end index")
1211                break
1212        return istart,iend
1213
1214#    def get_restfreqs(self):
1215#        """
1216#        Get the restfrequency(s) stored in this scantable.
1217#        The return value(s) are always of unit 'Hz'
1218#        Parameters:
1219#            none
1220#        Returns:
1221#            a list of doubles
1222#        """
1223#        return list(self._getrestfreqs())
1224
1225    def get_restfreqs(self, ids=None):
1226        """\
1227        Get the restfrequency(s) stored in this scantable.
1228        The return value(s) are always of unit 'Hz'
1229
1230        Parameters:
1231
1232            ids: (optional) a list of MOLECULE_IDs for which the
1233                 restfrequency(s) are to be retrieved
1234
1235        Returns:
1236
1237            dictionary containing ids and a list of doubles for each id
1238
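        Example (illustrative)::

            allfreqs = scan.get_restfreqs()      # {molecule_id: [values in Hz]}
            freqs0 = scan.get_restfreqs(ids=0)   # list of values for MOLECULE_ID 0
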
1239        """
1240        if ids is None:
1241            rfreqs={}
1242            idlist = self.getmolnos()
1243            for i in idlist:
1244                rfreqs[i]=list(self._getrestfreqs(i))
1245            return rfreqs
1246        else:
1247            if type(ids)==list or type(ids)==tuple:
1248                rfreqs={}
1249                for i in ids:
1250                    rfreqs[i]=list(self._getrestfreqs(i))
1251                return rfreqs
1252            else:
1253                return list(self._getrestfreqs(ids))
1254            #return list(self._getrestfreqs(ids))
1255
1256    def set_restfreqs(self, freqs=None, unit='Hz'):
1257        """\
1258        Set or replace the restfrequency(s) specified.
1259        If the 'freqs' argument holds a scalar,
1260        then that rest frequency will be applied to all the selected
1261        data.  If the 'freqs' argument holds
1262        a vector, then it MUST be of equal or smaller length than
1263        the number of IFs (and the available restfrequencies will be
1264        replaced by this vector).  In this case, *all* data have
1265        the restfrequency set per IF according
1266        to the corresponding value you give in the 'freqs' vector.
1267        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1268        IF 1 gets restfreq 2e9.
1269
1270        You can also specify the frequencies via a linecatalog.
1271
1272        Parameters:
1273
1274            freqs:   list of rest frequency values or string identifiers
1275
1276            unit:    unit for rest frequency (default 'Hz')
1277
1278
1279        Example::
1280
1281            # set the given restfrequency for the all currently selected IFs
1282            scan.set_restfreqs(freqs=1.4e9)
1283            # set restfrequencies for the n IFs  (n > 1) in the order of the
1284            # list, i.e
1285            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF2 -> 1.42e9
1286            # len(list_of_restfreqs) == nIF
1287            # for nIF == 1 the following will set multiple restfrequency for
1288            # that IF
1289            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1290            # set multiple restfrequencies per IF. as a list of lists where
1291            # the outer list has nIF elements, the inner s arbitrary
1292            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1293
1294       *Note*:
1295
1296            To do more sophisticated restfrequency setting, e.g. on a
1297            source and IF basis, use scantable.set_selection() before using
1298            this function::
1299
1300                # provided your scantable is called scan
1301                selection = selector()
1302                selection.set_name("ORION*")
1303                selection.set_ifs([1])
1304                scan.set_selection(selection)
1305                scan.set_restfreqs(freqs=86.6e9)
1306
1307        """
1308        varlist = vars()
1309        from asap import linecatalog
1310        # simple  value
1311        if isinstance(freqs, int) or isinstance(freqs, float):
1312            self._setrestfreqs([freqs], [""], unit)
1313        # list of values
1314        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1315            # list values are scalars
1316            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1317                if len(freqs) == 1:
1318                    self._setrestfreqs(freqs, [""], unit)
1319                else:
1320                    # allow the 'old' mode of setting multiple IFs
1321                    sel = selector()
1322                    savesel = self._getselection()
1323                    iflist = self.getifnos()
1324                    if len(freqs)>len(iflist):
1325                        raise ValueError("number of elements in the list "
1326                                         "exceeds the current IF selections")
1327                    iflist = self.getifnos()
1328                    for i, fval in enumerate(freqs):
1329                        sel.set_ifs(iflist[i])
1330                        self._setselection(sel)
1331                        self._setrestfreqs([fval], [""], unit)
1332                    self._setselection(savesel)
1333
1334            # list values are dict, {'value'=, 'name'=)
1335            elif isinstance(freqs[-1], dict):
1336                values = []
1337                names = []
1338                for d in freqs:
1339                    values.append(d["value"])
1340                    names.append(d["name"])
1341                self._setrestfreqs(values, names, unit)
1342            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1343                sel = selector()
1344                savesel = self._getselection()
1345                iflist = self.getifnos()
1346                if len(freqs)>len(iflist):
1347                    raise ValueError("number of elements in list of list exceeds"
1348                                     " the current IF selections")
1349                for i, fval in enumerate(freqs):
1350                    sel.set_ifs(iflist[i])
1351                    self._setselection(sel)
1352                    self._setrestfreqs(fval, [""], unit)
1353                self._setselection(savesel)
1354        # freqs are to be taken from a linecatalog
1355        elif isinstance(freqs, linecatalog):
1356            sel = selector()
1357            savesel = self._getselection()
1358            for i in xrange(freqs.nrow()):
1359                sel.set_ifs(self.getifnos()[i])
1360                self._setselection(sel)
1361                self._setrestfreqs([freqs.get_frequency(i)],
1362                                   [freqs.get_name(i)], "MHz")
1363                # ensure that we are not iterating past nIF
1364                if i == self.nif()-1: break
1365            self._setselection(savesel)
1366        else:
1367            return
1368        self._add_history("set_restfreqs", varlist)
1369
1370    def shift_refpix(self, delta):
1371        """\
1372        Shift the reference pixel of the Spectral Coordinate by an
1373        integer amount.
1374
1375        Parameters:
1376
1377            delta:   the amount to shift by
1378
1379        *Note*:
1380
1381            Be careful using this with broadband data.
1382
1383        """
1384        Scantable.shift_refpix(self, delta)
1385
1386    @asaplog_post_dec
1387    def history(self, filename=None):
1388        """\
1389        Print the history. Optionally to a file.
1390
1391        Parameters:
1392
1393            filename:    The name  of the file to save the history to.
1394
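        Example (illustrative; the file name is a placeholder)::

            scan.history()                         # print the history
            scan.history('scantable_history.txt')  # also write it to a file
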
1395        """
1396        hist = list(self._gethistory())
1397        out = "-"*80
1398        for h in hist:
1399            if h.startswith("---"):
1400                out = "\n".join([out, h])
1401            else:
1402                items = h.split("##")
1403                date = items[0]
1404                func = items[1]
1405                items = items[2:]
1406                out += "\n"+date+"\n"
1407                out += "Function: %s\n  Parameters:" % (func)
1408                for i in items:
1409                    s = i.split("=")
1410                    out += "\n   %s = %s" % (s[0], s[1])
1411                out = "\n".join([out, "-"*80])
1412        if filename is not None:
1413            if filename == "":
1414                filename = 'scantable_history.txt'
1415            import os
1416            filename = os.path.expandvars(os.path.expanduser(filename))
1417            if not os.path.isdir(filename):
1418                data = open(filename, 'w')
1419                data.write(out)
1420                data.close()
1421            else:
1422                msg = "Illegal file name '%s'." % (filename)
1423                raise IOError(msg)
1424        return page(out)
1425    #
1426    # Maths business
1427    #
1428    @asaplog_post_dec
1429    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1430        """\
1431        Return the (time) weighted average of a scan.
1432
1433        *Note*:
1434
1435            in channels only - align if necessary
1436
1437        Parameters:
1438
1439            mask:     an optional mask (only used for 'var' and 'tsys'
1440                      weighting)
1441
1442            scanav:   True averages each scan separately
1443                      False (default) averages all scans together,
1444
1445            weight:   Weighting scheme.
1446                      'none'     (mean, no weighting)
1447                      'var'      (1/var(spec) weighted)
1448                      'tsys'     (1/Tsys**2 weighted)
1449                      'tint'     (integration time weighted)
1450                      'tintsys'  (Tint/Tsys**2)
1451                      'median'   (median averaging)
1452                      The default is 'tint'
1453
1454            align:    align the spectra in velocity before averaging. It takes
1455                      the time of the first spectrum as reference time.
1456
1457        Example::
1458
1459            # time average the scantable without using a mask
1460            newscan = scan.average_time()
1461
1462        """
1463        varlist = vars()
1464        weight = weight or 'TINT'
1465        mask = mask or ()
1466        scanav = (scanav and 'SCAN') or 'NONE'
1467        scan = (self, )
1468
1469        if align:
1470            scan = (self.freq_align(insitu=False), )
1471        s = None
1472        if weight.upper() == 'MEDIAN':
1473            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1474                                                     scanav))
1475        else:
1476            s = scantable(self._math._average(scan, mask, weight.upper(),
1477                          scanav))
1478        s._add_history("average_time", varlist)
1479        return s
1480
1481    @asaplog_post_dec
1482    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1483        """\
1484        Return a scan where all spectra are converted to either
1485        Jansky or Kelvin depending upon the flux units of the scan table.
1486        By default the function tries to look the values up internally.
1487        If it can't find them (or if you want to over-ride), you must
1488        specify EITHER jyperk OR eta (and D which it will try to look up
1489        also if you don't set it). jyperk takes precedence if you set both.
1490
1491        Parameters:
1492
1493            jyperk:      the Jy / K conversion factor
1494
1495            eta:         the aperture efficiency
1496
1497            d:           the geometric diameter (metres)
1498
1499            insitu:      if False a new scantable is returned.
1500                         Otherwise, the scaling is done in-situ
1501                         The default is taken from .asaprc (False)
1502
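
        Example (an illustrative sketch; the conversion factor is arbitrary)::

            # apply an explicit Jy/K factor and return a new scantable
            jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
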
1503        """
1504        if insitu is None: insitu = rcParams['insitu']
1505        self._math._setinsitu(insitu)
1506        varlist = vars()
1507        jyperk = jyperk or -1.0
1508        d = d or -1.0
1509        eta = eta or -1.0
1510        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1511        s._add_history("convert_flux", varlist)
1512        if insitu: self._assign(s)
1513        else: return s
1514
1515    @asaplog_post_dec
1516    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1517        """\
1518        Return a scan after applying a gain-elevation correction.
1519        The correction can be made via either a polynomial or a
1520        table-based interpolation (and extrapolation if necessary).
1521        You specify polynomial coefficients, an ascii table or neither.
1522        If you specify neither, then a polynomial correction will be made
1523        with built-in coefficients known for certain telescopes (an error
1524        will occur if the instrument is not known).
1525        The data and Tsys are *divided* by the scaling factors.
1526
1527        Parameters:
1528
1529            poly:        Polynomial coefficients (default None) to compute a
1530                         gain-elevation correction as a function of
1531                         elevation (in degrees).
1532
1533            filename:    The name of an ascii file holding correction factors.
1534                         The first row of the ascii file must give the column
1535                         names and these MUST include columns
1536                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1537                         by this) somewhere.
1538                         The second row must give the data type of the
1539                         column. Use 'R' for Real and 'I' for Integer.
1540                         An example file would be
1541                         (actual factors are arbitrary) :
1542
1543                         TIME ELEVATION FACTOR
1544                         R R R
1545                         0.1 0 0.8
1546                         0.2 20 0.85
1547                         0.3 40 0.9
1548                         0.4 60 0.85
1549                         0.5 80 0.8
1550                         0.6 90 0.75
1551
1552            method:      Interpolation method when correcting from a table.
1553                         Values are  "nearest", "linear" (default), "cubic"
1554                         and "spline"
1555
1556            insitu:      if False a new scantable is returned.
1557                         Otherwise, the scaling is done in-situ
1558                         The default is taken from .asaprc (False)
1559
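        Example::

            # a minimal sketch; 'gainel.txt' is a hypothetical ascii table
            # in the format described above
            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
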
1560        """
1561
1562        if insitu is None: insitu = rcParams['insitu']
1563        self._math._setinsitu(insitu)
1564        varlist = vars()
1565        poly = poly or ()
1566        from os.path import expandvars
1567        filename = expandvars(filename)
1568        s = scantable(self._math._gainel(self, poly, filename, method))
1569        s._add_history("gain_el", varlist)
1570        if insitu:
1571            self._assign(s)
1572        else:
1573            return s
1574
1575    @asaplog_post_dec
1576    def freq_align(self, reftime=None, method='cubic', insitu=None):
1577        """\
1578        Return a scan where all rows have been aligned in frequency/velocity.
1579        The alignment frequency frame (e.g. LSRK) is that set by function
1580        set_freqframe.
1581
1582        Parameters:
1583
1584            reftime:     reference time to align at. By default, the time of
1585                         the first row of data is used.
1586
1587            method:      Interpolation method for regridding the spectra.
1588                         Choose from "nearest", "linear", "cubic" (default)
1589                         and "spline"
1590
1591            insitu:      if False a new scantable is returned.
1592                         Otherwise, the scaling is done in-situ
1593                         The default is taken from .asaprc (False)
1594
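        Example::

            # align all rows to the time of the first row (the default)
            aligned = scan.freq_align(insitu=False)
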
1595        """
1596        if insitu is None: insitu = rcParams["insitu"]
1597        self._math._setinsitu(insitu)
1598        varlist = vars()
1599        reftime = reftime or ""
1600        s = scantable(self._math._freq_align(self, reftime, method))
1601        s._add_history("freq_align", varlist)
1602        if insitu: self._assign(s)
1603        else: return s
1604
1605    @asaplog_post_dec
1606    def opacity(self, tau=None, insitu=None):
1607        """\
1608        Apply an opacity correction. The data
1609        and Tsys are multiplied by the correction factor.
1610
1611        Parameters:
1612
1613            tau:         (list of) opacity from which the correction factor is
1614                         exp(tau*ZD)
1615                         where ZD is the zenith-distance.
1616                         If a list is provided, it has to be of length nIF,
1617                         nIF*nPol or 1 and in order of IF/POL, e.g.
1618                         [opif0pol0, opif0pol1, opif1pol0 ...]
1619                         if tau is `None` the opacities are determined from a
1620                         model.
1621
1622            insitu:      if False a new scantable is returned.
1623                         Otherwise, the scaling is done in-situ
1624                         The default is taken from .asaprc (False)
1625
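        Example::

            # a minimal sketch; the zenith opacity value is illustrative only
            corrected = scan.opacity(tau=0.083, insitu=False)
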
1626        """
1627        if insitu is None: insitu = rcParams['insitu']
1628        self._math._setinsitu(insitu)
1629        varlist = vars()
1630        if not hasattr(tau, "__len__"):
1631            tau = [tau]
1632        s = scantable(self._math._opacity(self, tau))
1633        s._add_history("opacity", varlist)
1634        if insitu: self._assign(s)
1635        else: return s
1636
1637    @asaplog_post_dec
1638    def bin(self, width=5, insitu=None):
1639        """\
1640        Return a scan where all spectra have been binned up.
1641
1642        Parameters:
1643
1644            width:       The bin width (default=5) in pixels
1645
1646            insitu:      if False a new scantable is returned.
1647                         Otherwise, the scaling is done in-situ
1648                         The default is taken from .asaprc (False)
1649
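        Example::

            # bin up the spectra by 10 channels, returning a new scantable
            binned = scan.bin(width=10, insitu=False)
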
1650        """
1651        if insitu is None: insitu = rcParams['insitu']
1652        self._math._setinsitu(insitu)
1653        varlist = vars()
1654        s = scantable(self._math._bin(self, width))
1655        s._add_history("bin", varlist)
1656        if insitu:
1657            self._assign(s)
1658        else:
1659            return s
1660
1661    @asaplog_post_dec
1662    def resample(self, width=5, method='cubic', insitu=None):
1663        """\
1664        Return a scan where all spectra have been resampled (binned up using interpolation).
1665
1666        Parameters:
1667
1668            width:       The bin width (default=5) in pixels
1669
1670            method:      Interpolation method used for the resampling.
1671                         Values are  "nearest", "linear", "cubic" (default)
1672                         and "spline"
1673
1674            insitu:      if False a new scantable is returned.
1675                         Otherwise, the scaling is done in-situ
1676                         The default is taken from .asaprc (False)
1677
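        Example::

            # resample with a bin width of 4 pixels using cubic interpolation
            resampled = scan.resample(width=4, method='cubic', insitu=False)
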
1678        """
1679        if insitu is None: insitu = rcParams['insitu']
1680        self._math._setinsitu(insitu)
1681        varlist = vars()
1682        s = scantable(self._math._resample(self, method, width))
1683        s._add_history("resample", varlist)
1684        if insitu: self._assign(s)
1685        else: return s
1686
1687    @asaplog_post_dec
1688    def average_pol(self, mask=None, weight='none'):
1689        """\
1690        Average the Polarisations together.
1691
1692        Parameters:
1693
1694            mask:        An optional mask defining the region over which the
1695                         averaging will be applied. The output will have all
1696                         specified points masked.
1697
1698            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1699                         weighted), or 'tsys' (1/Tsys**2 weighted)
1700
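        Example::

            # average the polarisations, weighting by 1/Tsys**2
            polaveraged = scan.average_pol(weight='tsys')
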
1701        """
1702        varlist = vars()
1703        mask = mask or ()
1704        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1705        s._add_history("average_pol", varlist)
1706        return s
1707
1708    @asaplog_post_dec
1709    def average_beam(self, mask=None, weight='none'):
1710        """\
1711        Average the Beams together.
1712
1713        Parameters:
1714            mask:        An optional mask defining the region over which the
1715                         averaging will be applied. The output will have all
1716                         specified points masked.
1717
1718            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1719                         weighted), or 'tsys' (1/Tsys**2 weighted)
1720
1721        """
1722        varlist = vars()
1723        mask = mask or ()
1724        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1725        s._add_history("average_beam", varlist)
1726        return s
1727
1728    def parallactify(self, pflag):
1729        """\
1730        Set a flag to indicate whether this data should be treated as having
1731        been 'parallactified' (total phase == 0.0)
1732
1733        Parameters:
1734
1735            pflag:  Bool indicating whether to turn this on (True) or
1736                    off (False)
1737
1738        """
1739        varlist = vars()
1740        self._parallactify(pflag)
1741        self._add_history("parallactify", varlist)
1742
1743    @asaplog_post_dec
1744    def convert_pol(self, poltype=None):
1745        """\
1746        Convert the data to a different polarisation type.
1747        Note that you will need cross-polarisation terms for most conversions.
1748
1749        Parameters:
1750
1751            poltype:    The new polarisation type. Valid types are:
1752                        "linear", "circular", "stokes" and "linpol"
1753
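        Example::

            # a minimal sketch; requires cross-polarisation terms in the data
            stokes = scan.convert_pol('stokes')
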
1754        """
1755        varlist = vars()
1756        s = scantable(self._math._convertpol(self, poltype))
1757        s._add_history("convert_pol", varlist)
1758        return s
1759
1760    @asaplog_post_dec
1761    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1762        """\
1763        Smooth the spectrum by the specified kernel (conserving flux).
1764
1765        Parameters:
1766
1767            kernel:     The type of smoothing kernel. Select from
1768                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1769                        or 'poly'
1770
1771            width:      The width of the kernel in pixels. For 'hanning' this is
1772                        ignored; otherwise it defaults to 5 pixels.
1773                        For 'gaussian' it is the Full Width Half
1774                        Maximum. For 'boxcar' it is the full width.
1775                        For 'rmedian' and 'poly' it is the half width.
1776
1777            order:      Optional parameter for 'poly' kernel (default is 2), to
1778                        specify the order of the polynomial. Ignored by all other
1779                        kernels.
1780
1781            plot:       plot the original and the smoothed spectra.
1782                        In this mode each individual fit has to be approved,
1783                        by typing 'y' or 'n'.
1784
1785            insitu:     if False a new scantable is returned.
1786                        Otherwise, the scaling is done in-situ
1787                        The default is taken from .asaprc (False)
1788
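        Example::

            # boxcar smoothing over 5 channels, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)
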
1789        """
1790        if insitu is None: insitu = rcParams['insitu']
1791        self._math._setinsitu(insitu)
1792        varlist = vars()
1793
1794        if plot: orgscan = self.copy()
1795
1796        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1797        s._add_history("smooth", varlist)
1798
1799        if plot:
1800            if rcParams['plotter.gui']:
1801                from asap.asaplotgui import asaplotgui as asaplot
1802            else:
1803                from asap.asaplot import asaplot
1804            self._p=asaplot()
1805            self._p.set_panels()
1806            ylab=s._get_ordinate_label()
1807            #self._p.palette(0,["#777777","red"])
1808            for r in xrange(s.nrow()):
1809                xsm=s._getabcissa(r)
1810                ysm=s._getspectrum(r)
1811                xorg=orgscan._getabcissa(r)
1812                yorg=orgscan._getspectrum(r)
1813                self._p.clear()
1814                self._p.hold()
1815                self._p.set_axes('ylabel',ylab)
1816                self._p.set_axes('xlabel',s._getabcissalabel(r))
1817                self._p.set_axes('title',s._getsourcename(r))
1818                self._p.set_line(label='Original',color="#777777")
1819                self._p.plot(xorg,yorg)
1820                self._p.set_line(label='Smoothed',color="red")
1821                self._p.plot(xsm,ysm)
1822                ### Ugly part for legend
1823                for i in [0,1]:
1824                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1825                self._p.release()
1826                ### Ugly part for legend
1827                self._p.subplots[0]['lines']=[]
1828                res = raw_input("Accept smoothing ([y]/n): ")
1829                if res.upper() == 'N':
1830                    s._setspectrum(yorg, r)
1831            self._p.unmap()
1832            self._p = None
1833            del orgscan
1834
1835        if insitu: self._assign(s)
1836        else: return s
1837
1838    @asaplog_post_dec
1839    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1840                      insitu=None):
1841        """\
1842        Return a scan which has been baselined (all rows) by a polynomial.
1843
1844        Parameters:
1845
1846            mask:       an optional mask
1847
1848            order:      the order of the polynomial (default is 0)
1849
1850            plot:       plot the fit and the residual. In this mode each
1851                        individual fit has to be approved, by typing 'y'
1852                        or 'n'.
1853
1854            uselin:     use linear polynomial fit
1855
1856            insitu:     if False a new scantable is returned.
1857                        Otherwise, the scaling is done in-situ
1858                        The default is taken from .asaprc (False)
1859
1860        Example::
1861
1862            # return a scan baselined by a third order polynomial,
1863            # not using a mask
1864            bscan = scan.poly_baseline(order=3)
1865
1866        """
1867        if insitu is None: insitu = rcParams['insitu']
1868        if not insitu:
1869            workscan = self.copy()
1870        else:
1871            workscan = self
1872        varlist = vars()
1873        if mask is None:
1874            mask = [True for i in xrange(self.nchan(-1))]
1875
1876        from asap.asapfitter import fitter
1877        try:
1878            f = fitter()
1879            if uselin:
1880                f.set_function(lpoly=order)
1881            else:
1882                f.set_function(poly=order)
1883
1884            rows = range(workscan.nrow())
1885            if len(rows) > 0:
1886                self.blpars = []
1887
1888            for r in rows:
1889                # take into account flagtra info (CAS-1434)
1890                flagtra = workscan._getmask(r)
1891                actualmask = mask[:]
1892                if len(actualmask) == 0:
1893                    actualmask = list(flagtra[:])
1894                else:
1895                    if len(actualmask) != len(flagtra):
1896                        raise RuntimeError, "Mask and flagtra have different length"
1897                    else:
1898                        for i in range(0, len(actualmask)):
1899                            actualmask[i] = actualmask[i] and flagtra[i]
1900                f.set_scan(workscan, actualmask)
1901                f.x = workscan._getabcissa(r)
1902                f.y = workscan._getspectrum(r)
1903                f.data = None
1904                f.fit()
1905                if plot:
1906                    f.plot(residual=True)
1907                    x = raw_input("Accept fit ( [y]/n ): ")
1908                    if x.upper() == 'N':
1909                        self.blpars.append(None)
1910                        continue
1911                workscan._setspectrum(f.fitter.getresidual(), r)
1912                self.blpars.append(f.get_parameters())
1913
1914            if plot:
1915                f._p.unmap()
1916                f._p = None
1917            workscan._add_history("poly_baseline", varlist)
1918            if insitu:
1919                self._assign(workscan)
1920            else:
1921                return workscan
1922        except RuntimeError:
1923            msg = "The fit failed, possibly because it didn't converge."
1924            raise RuntimeError(msg)
1925
1926
1927    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1928                           threshold=3, chan_avg_limit=1, plot=False,
1929                           insitu=None):
1930        """\
1931        Return a scan which has been baselined (all rows) by a polynomial.
1932        Spectral lines are detected first using linefinder and masked out
1933        to avoid them affecting the baseline solution.
1934
1935        Parameters:
1936
1937            mask:       an optional mask retrieved from the scantable
1938
1939            edge:       an optional number of channels to drop at the edge of
1940                        the spectrum. If only one value is
1941                        specified, the same number will be dropped from
1942                        both sides of the spectrum. Default is to keep
1943                        all channels. Nested tuples represent individual
1944                        edge selections for different IFs (the number of
1945                        spectral channels can differ between IFs)
1946
1947            order:      the order of the polynomial (default is 0)
1948
1949            threshold:  the threshold used by line finder. It is better to
1950                        keep it large as only strong lines affect the
1951                        baseline solution.
1952
1953            chan_avg_limit:
1954                        a maximum number of consecutive spectral channels to
1955                        average during the search of weak and broad lines.
1956                        The default is no averaging (and no search for weak
1957                        lines). If such lines can affect the fitted baseline
1958                        (e.g. a high order polynomial is fitted), increase this
1959                        parameter (usually values up to 8 are reasonable). Most
1960                        users of this method should find the default value
1961                        sufficient.
1962
1963            plot:       plot the fit and the residual. In this mode each
1964                        individual fit has to be approved, by typing 'y'
1965                        or 'n'.
1966
1967            insitu:     if False a new scantable is returned.
1968                        Otherwise, the scaling is done in-situ
1969                        The default is taken from .asaprc (False)
1970
1971
1972        Example::
1973
1974            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
1975
1976        """
1977        if insitu is None: insitu = rcParams['insitu']
1978        varlist = vars()
1979        from asap.asapfitter import fitter
1980        from asap.asaplinefind import linefinder
1981        from asap import _is_sequence_or_number as _is_valid
1982
1983        # check whether edge is set up for each IF individually
1984        individualedge = False
1985        if len(edge) > 1:
1986            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1987                individualedge = True
1988
1989        if not _is_valid(edge, int) and not individualedge:
1990            raise ValueError, "Parameter 'edge' has to be an integer or a \
1991            pair of integers specified as a tuple. Nested tuples are allowed \
1992            to make individual selection for different IFs."
1993
1994        curedge = (0, 0)
1995        if individualedge:
1996            for edgepar in edge:
1997                if not _is_valid(edgepar, int):
1998                    raise ValueError, "Each element of the 'edge' tuple has \
1999                                       to be a pair of integers or an integer."
2000        else:
2001            curedge = edge
2002
2003        # setup fitter
2004        f = fitter()
2005        f.set_function(poly=order)
2006
2007        # setup line finder
2008        fl = linefinder()
2009        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2010
2011        if not insitu:
2012            workscan = self.copy()
2013        else:
2014            workscan = self
2015
2016        fl.set_scan(workscan)
2017
2018        rows = range(workscan.nrow())
2019        # Save parameters of baseline fits & masklists as a class attribute.
2020        # NOTICE: It does not reflect changes in scantable!
2021        if len(rows) > 0:
2022            self.blpars=[]
2023            self.masklists=[]
2024        asaplog.push("Processing:")
2025        for r in rows:
2026            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2027                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2028                 workscan.getpol(r), workscan.getcycle(r))
2029            asaplog.push(msg, False)
2030
2031            # figure out edge parameter
2032            if individualedge:
2033                if len(edge) <= workscan.getif(r):
2034                    raise RuntimeError, "Number of edge elements appears to " \
2035                                        "be less than the number of IFs"
2036                curedge = edge[workscan.getif(r)]
2037
2038            # take into account flagtra info (CAS-1434)
2039            flagtra = workscan._getmask(r)
2040            actualmask = mask[:]
2041            if len(actualmask) == 0:
2042                actualmask = list(flagtra[:])
2043            else:
2044                if len(actualmask) != len(flagtra):
2045                    raise RuntimeError, "Mask and flagtra have different length"
2046                else:
2047                    for i in range(0, len(actualmask)):
2048                        actualmask[i] = actualmask[i] and flagtra[i]
2049
2050            # setup line finder
2051            fl.find_lines(r, actualmask, curedge)
2052            outmask=fl.get_mask()
2053            f.set_scan(workscan, fl.get_mask())
2054            f.x = workscan._getabcissa(r)
2055            f.y = workscan._getspectrum(r)
2056            f.data = None
2057            f.fit()
2058
2059            # Show mask list
2060            masklist=workscan.get_masklist(fl.get_mask(),row=r)
2061            msg = "mask range: "+str(masklist)
2062            asaplog.push(msg, False)
2063
2064            if plot:
2065                f.plot(residual=True)
2066                x = raw_input("Accept fit ( [y]/n ): ")
2067                if x.upper() == 'N':
2068                    self.blpars.append(None)
2069                    self.masklists.append(None)
2070                    continue
2071
2072            workscan._setspectrum(f.fitter.getresidual(), r)
2073            self.blpars.append(f.get_parameters())
2074            self.masklists.append(masklist)
2075        if plot:
2076            f._p.unmap()
2077            f._p = None
2078        workscan._add_history("auto_poly_baseline", varlist)
2079        if insitu:
2080            self._assign(workscan)
2081        else:
2082            return workscan
2083
2084    @asaplog_post_dec
2085    def rotate_linpolphase(self, angle):
2086        """\
2087        Rotate the phase of the complex polarization O=Q+iU correlation.
2088        This is always done in situ in the raw data.  So if you call this
2089        function more than once then each call rotates the phase further.
2090
2091        Parameters:
2092
2093            angle:   The angle (degrees) to rotate (add) by.
2094
2095        Example::
2096
2097            scan.rotate_linpolphase(2.3)
2098
2099        """
2100        varlist = vars()
2101        self._math._rotate_linpolphase(self, angle)
2102        self._add_history("rotate_linpolphase", varlist)
2103        return
2104
2105    @asaplog_post_dec
2106    def rotate_xyphase(self, angle):
2107        """\
2108        Rotate the phase of the XY correlation.  This is always done in situ
2109        in the data.  So if you call this function more than once
2110        then each call rotates the phase further.
2111
2112        Parameters:
2113
2114            angle:   The angle (degrees) to rotate (add) by.
2115
2116        Example::
2117
2118            scan.rotate_xyphase(2.3)
2119
2120        """
2121        varlist = vars()
2122        self._math._rotate_xyphase(self, angle)
2123        self._add_history("rotate_xyphase", varlist)
2124        return
2125
2126    @asaplog_post_dec
2127    def swap_linears(self):
2128        """\
2129        Swap the linear polarisations XX and YY, or more generally the first
2130        two polarisations, as this also works for circulars.
2131        """
2132        varlist = vars()
2133        self._math._swap_linears(self)
2134        self._add_history("swap_linears", varlist)
2135        return
2136
2137    @asaplog_post_dec
2138    def invert_phase(self):
2139        """\
2140        Invert the phase of the complex polarisation
2141        """
2142        varlist = vars()
2143        self._math._invert_phase(self)
2144        self._add_history("invert_phase", varlist)
2145        return
2146
2147    @asaplog_post_dec
2148    def add(self, offset, insitu=None):
2149        """\
2150        Return a scan where all spectra have the offset added
2151
2152        Parameters:
2153
2154            offset:      the offset
2155
2156            insitu:      if False a new scantable is returned.
2157                         Otherwise, the scaling is done in-situ
2158                         The default is taken from .asaprc (False)
2159
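        Example::

            # add a constant offset (illustrative value) to all spectra
            shifted = scan.add(0.05, insitu=False)
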
2160        """
2161        if insitu is None: insitu = rcParams['insitu']
2162        self._math._setinsitu(insitu)
2163        varlist = vars()
2164        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2165        s._add_history("add", varlist)
2166        if insitu:
2167            self._assign(s)
2168        else:
2169            return s
2170
2171    @asaplog_post_dec
2172    def scale(self, factor, tsys=True, insitu=None):
2173        """\
2174
2175        Return a scan where all spectra are scaled by the given 'factor'
2176
2177        Parameters:
2178
2179            factor:      the scaling factor (float or 1D float list)
2180
2181            insitu:      if False a new scantable is returned.
2182                         Otherwise, the scaling is done in-situ
2183                         The default is taken from .asaprc (False)
2184
2185            tsys:        if True (default) then apply the operation to Tsys
2186                         as well as the data
2187
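        Example::

            # scale all spectra (but not Tsys) by an illustrative factor
            scaled = scan.scale(1.3, tsys=False, insitu=False)
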
2188        """
2189        if insitu is None: insitu = rcParams['insitu']
2190        self._math._setinsitu(insitu)
2191        varlist = vars()
2192        s = None
2193        import numpy
2194        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2195            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2196                from asapmath import _array2dOp
2197                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2198            else:
2199                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2200        else:
2201            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2202        s._add_history("scale", varlist)
2203        if insitu:
2204            self._assign(s)
2205        else:
2206            return s
2207
2208    def set_sourcetype(self, match, matchtype="pattern",
2209                       sourcetype="reference"):
2210        """\
2211        Set the type of the source to be a source or reference scan
2212        using the provided pattern.
2213
2214        Parameters:
2215
2216            match:          a Unix style pattern, regular expression or selector
2217
2218            matchtype:      'pattern' (default) UNIX style pattern or
2219                            'regex' regular expression
2220
2221            sourcetype:     the type of the source to use (source/reference)
2222
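        Example::

            # a minimal sketch; mark scans whose source name ends in '_R'
            # (a common reference-scan naming convention) as reference scans
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
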
2223        """
2224        varlist = vars()
2225        basesel = self.get_selection()
2226        stype = -1
2227        if sourcetype.lower().startswith("r"):
2228            stype = 1
2229        elif sourcetype.lower().startswith("s"):
2230            stype = 0
2231        else:
2232            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2233        if matchtype.lower().startswith("p"):
2234            matchtype = "pattern"
2235        elif matchtype.lower().startswith("r"):
2236            matchtype = "regex"
2237        else:
2238            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2239        sel = selector()
2240        if isinstance(match, selector):
2241            sel = match
2242        else:
2243            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2244        self.set_selection(basesel+sel)
2245        self._setsourcetype(stype)
2246        self.set_selection(basesel)
2247        self._add_history("set_sourcetype", varlist)
2248
2249    @asaplog_post_dec
2250    @preserve_selection
2251    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2252        """\
2253        This function allows you to build quotients automatically.
2254        It assumes the observation has the same number of
2255        "on" and "off" scans.
2256
2257        Parameters:
2258
2259            preserve:       you can preserve (default) the continuum or
2260                            remove it.  The equations used are
2261
2262                            preserve: Output = Toff * (on/off) - Toff
2263
2264                            remove:   Output = Toff * (on/off) - Ton
2265
2266            mode:           the on/off detection mode
2267                            'paired' (default)
2268                            identifies 'off' scans by the
2269                            trailing '_R' (Mopra/Parkes) or
2270                            '_e'/'_w' (Tid) and matches
2271                            on/off pairs from the observing pattern
2272                            'time'
2273                            finds the closest off in time
2274
2275        .. todo:: verify argument is not implemented
2276
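        Example::

            # form quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
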
2277        """
2278        varlist = vars()
2279        modes = ["time", "paired"]
2280        if not mode in modes:
2281            msg = "please provide valid mode. Valid modes are %s" % (modes)
2282            raise ValueError(msg)
2283        s = None
2284        if mode.lower() == "paired":
2285            sel = self.get_selection()
2286            sel.set_query("SRCTYPE==psoff")
2287            self.set_selection(sel)
2288            offs = self.copy()
2289            sel.set_query("SRCTYPE==pson")
2290            self.set_selection(sel)
2291            ons = self.copy()
2292            s = scantable(self._math._quotient(ons, offs, preserve))
2293        elif mode.lower() == "time":
2294            s = scantable(self._math._auto_quotient(self, mode, preserve))
2295        s._add_history("auto_quotient", varlist)
2296        return s
2297
2298    @asaplog_post_dec
2299    def mx_quotient(self, mask = None, weight='median', preserve=True):
2300        """\
2301        Form a quotient using "off" beams when observing in "MX" mode.
2302
2303        Parameters:
2304
2305            mask:           an optional mask to be used when weight == 'stddev'
2306
2307            weight:         How to average the off beams.  Default is 'median'.
2308
2309            preserve:       you can preserve (default) the continuum or
2310                            remove it.  The equations used are:
2311
2312                                preserve: Output = Toff * (on/off) - Toff
2313
2314                                remove:   Output = Toff * (on/off) - Ton
2315
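        Example::

            # form the MX-mode quotient using the median of the off beams
            q = scan.mx_quotient(weight='median', preserve=True)
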
2316        """
2317        mask = mask or ()
2318        varlist = vars()
2319        on = scantable(self._math._mx_extract(self, 'on'))
2320        preoff = scantable(self._math._mx_extract(self, 'off'))
2321        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2322        from asapmath  import quotient
2323        q = quotient(on, off, preserve)
2324        q._add_history("mx_quotient", varlist)
2325        return q
2326
2327    @asaplog_post_dec
2328    def freq_switch(self, insitu=None):
2329        """\
2330        Apply frequency switching to the data.
2331
2332        Parameters:
2333
2334            insitu:      if False a new scantable is returned.
2335                         Otherwise, the switching is done in-situ
2336                         The default is taken from .asaprc (False)
2337
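        Example::

            # apply frequency switching and return a new scantable
            fq = scan.freq_switch(insitu=False)
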
2338        """
2339        if insitu is None: insitu = rcParams['insitu']
2340        self._math._setinsitu(insitu)
2341        varlist = vars()
2342        s = scantable(self._math._freqswitch(self))
2343        s._add_history("freq_switch", varlist)
2344        if insitu:
2345            self._assign(s)
2346        else:
2347            return s
2348
2349    @asaplog_post_dec
2350    def recalc_azel(self):
2351        """Recalculate the azimuth and elevation for each position."""
2352        varlist = vars()
2353        self._recalcazel()
2354        self._add_history("recalc_azel", varlist)
2355        return
2356
2357    @asaplog_post_dec
2358    def __add__(self, other):
2359        varlist = vars()
2360        s = None
2361        if isinstance(other, scantable):
2362            s = scantable(self._math._binaryop(self, other, "ADD"))
2363        elif isinstance(other, float):
2364            s = scantable(self._math._unaryop(self, other, "ADD", False))
2365        else:
2366            raise TypeError("Other input is not a scantable or float value")
2367        s._add_history("operator +", varlist)
2368        return s
2369
2370    @asaplog_post_dec
2371    def __sub__(self, other):
2372        """
2373        implicit on all axes and on Tsys
2374        """
2375        varlist = vars()
2376        s = None
2377        if isinstance(other, scantable):
2378            s = scantable(self._math._binaryop(self, other, "SUB"))
2379        elif isinstance(other, float):
2380            s = scantable(self._math._unaryop(self, other, "SUB", False))
2381        else:
2382            raise TypeError("Other input is not a scantable or float value")
2383        s._add_history("operator -", varlist)
2384        return s
2385
2386    @asaplog_post_dec
2387    def __mul__(self, other):
2388        """
2389        implicit on all axes and on Tsys
2390        """
2391        varlist = vars()
2392        s = None
2393        if isinstance(other, scantable):
2394            s = scantable(self._math._binaryop(self, other, "MUL"))
2395        elif isinstance(other, float):
2396            s = scantable(self._math._unaryop(self, other, "MUL", False))
2397        else:
2398            raise TypeError("Other input is not a scantable or float value")
2399        s._add_history("operator *", varlist)
2400        return s
2401
2402
2403    @asaplog_post_dec
2404    def __div__(self, other):
2405        """
2406        implicit on all axes and on Tsys
2407        """
2408        varlist = vars()
2409        s = None
2410        if isinstance(other, scantable):
2411            s = scantable(self._math._binaryop(self, other, "DIV"))
2412        elif isinstance(other, float):
2413            if other == 0.0:
2414                raise ZeroDivisionError("Dividing by zero is not recommended")
2415            s = scantable(self._math._unaryop(self, other, "DIV", False))
2416        else:
2417            raise TypeError("Other input is not a scantable or float value")
2418        s._add_history("operator /", varlist)
2419        return s
2420
2421    @asaplog_post_dec
2422    def get_fit(self, row=0):
2423        """\
2424        Print or return the stored fits for a row in the scantable
2425
2426        Parameters:
2427
2428            row:    the row which the fit has been applied to.
2429
2430        """
2431        if row >= self.nrow():
2432            return
2433        from asap.asapfit import asapfit
2434        fit = asapfit(self._getfit(row))
2435        asaplog.push( '%s' %(fit) )
2436        return fit.as_dict()
2437
2438    def flag_nans(self):
2439        """\
2440        Utility function to flag NaN values in the scantable.
2441        """
2442        import numpy
2443        basesel = self.get_selection()
2444        for i in range(self.nrow()):
2445            sel = self.get_row_selector(i)
2446            self.set_selection(basesel+sel)
2447            nans = numpy.isnan(self._getspectrum(0))
2448            if numpy.any(nans):
2449                bnans = [ bool(v) for v in nans]
2450                self.flag(bnans)
2451        self.set_selection(basesel)
2452
2453    def get_row_selector(self, rowno):
2454        return selector(beams=self.getbeam(rowno),
2455                        ifs=self.getif(rowno),
2456                        pols=self.getpol(rowno),
2457                        scans=self.getscan(rowno),
2458                        cycles=self.getcycle(rowno))
2459
2460    def _add_history(self, funcname, parameters):
2461        if not rcParams['scantable.history']:
2462            return
2463        # create date
2464        sep = "##"
2465        from datetime import datetime
2466        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2467        hist = dstr+sep
2468        hist += funcname+sep#cdate+sep
2469        if parameters.has_key('self'): del parameters['self']
2470        for k, v in parameters.iteritems():
2471            if type(v) is dict:
2472                for k2, v2 in v.iteritems():
2473                    hist += k2
2474                    hist += "="
2475                    if isinstance(v2, scantable):
2476                        hist += 'scantable'
2477                    elif k2 == 'mask':
2478                        if isinstance(v2, list) or isinstance(v2, tuple):
2479                            hist += str(self._zip_mask(v2))
2480                        else:
2481                            hist += str(v2)
2482                    else:
2483                        hist += str(v2)
2484            else:
2485                hist += k
2486                hist += "="
2487                if isinstance(v, scantable):
2488                    hist += 'scantable'
2489                elif k == 'mask':
2490                    if isinstance(v, list) or isinstance(v, tuple):
2491                        hist += str(self._zip_mask(v))
2492                    else:
2493                        hist += str(v)
2494                else:
2495                    hist += str(v)
2496            hist += sep
2497        hist = hist[:-2] # remove trailing '##'
2498        self._addhistory(hist)
2499
2500
2501    def _zip_mask(self, mask):
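        # Pack a boolean channel mask into a list of [start, end) index
        # pairs covering the consecutive True (unmasked) segments.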
2502        mask = list(mask)
2503        i = 0
2504        segments = []
2505        while mask[i:].count(1):
2506            i += mask[i:].index(1)
2507            if mask[i:].count(0):
2508                j = i + mask[i:].index(0)
2509            else:
2510                j = len(mask)
2511            segments.append([i, j])
2512            i = j
2513        return segments
2514
2515    def _get_ordinate_label(self):
2516        fu = "("+self.get_fluxunit()+")"
2517        import re
2518        lbl = "Intensity"
2519        if re.match(".K.", fu):
2520            lbl = "Brightness Temperature "+ fu
2521        elif re.match(".Jy.", fu):
2522            lbl = "Flux density "+ fu
2523        return lbl
2524
2525    def _check_ifs(self):
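        # Rough consistency check: True if all IFs with data have the same
        # number of channels (compares the mean channel count to the first).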
2526        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2527        nchans = filter(lambda t: t > 0, nchans)
2528        return (sum(nchans)/len(nchans) == nchans[0])
2529
2530    @asaplog_post_dec
2531    def _fill(self, names, unit, average, getpt, antenna):
2532        first = True
2533        fullnames = []
2534        for name in names:
2535            name = os.path.expandvars(name)
2536            name = os.path.expanduser(name)
2537            if not os.path.exists(name):
2538                msg = "File '%s' does not exist" % (name)
2539                raise IOError(msg)
2540            fullnames.append(name)
2541        if average:
2542            asaplog.push('Auto averaging integrations')
2543        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2544        for name in fullnames:
2545            tbl = Scantable(stype)
2546            r = filler(tbl)
2547            rx = rcParams['scantable.reference']
2548            r.setreferenceexpr(rx)
2549            msg = "Importing %s..." % (name)
2550            asaplog.push(msg, False)
2551            r.open(name)# antenna, -1, -1, getpt)
2552            r.fill()
2553            if average:
2554                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2555            if not first:
2556                tbl = self._math._merge([self, tbl])
2557            Scantable.__init__(self, tbl)
2558            r.close()
2559            del r, tbl
2560            first = False
2561            #flush log
2562        asaplog.post()
2563        if unit is not None:
2564            self.set_fluxunit(unit)
2565        if not is_casapy():
2566            self.set_freqframe(rcParams['scantable.freqframe'])
2567
2568    def __getitem__(self, key):
2569        if key < 0:
2570            key += self.nrow()
2571        if key >= self.nrow():
2572            raise IndexError("Row index out of range.")
2573        return self._getspectrum(key)
2574
2575    def __setitem__(self, key, value):
2576        if key < 0:
2577            key += self.nrow()
2578        if key >= self.nrow():
2579            raise IndexError("Row index out of range.")
2580        if not hasattr(value, "__len__") or \
2581                len(value) > self.nchan(self.getif(key)):
2582            raise ValueError("Spectrum length doesn't match.")
2583        return self._setspectrum(value, key)
2584
2585    def __len__(self):
2586        return self.nrow()
2587
2588    def __iter__(self):
2589        for i in range(len(self)):
2590            yield self[i]