source: trunk/python/scantable.py @ 1862

Last change on this file since 1862 was 1862, checked in by Malte Marquarding, 14 years ago

renamed print_log_dec to more explicit asaplog_post_dec

1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18
19
20def preserve_selection(func):
21    @wraps_dec(func)
22    def wrap(obj, *args, **kw):
23        basesel = obj.get_selection()
24        try:
25            val = func(obj, *args, **kw)
26        finally:
27            obj.set_selection(basesel)
28        return val
29    return wrap
30
31def is_scantable(filename):
32    """Is the given file a scantable?
33
34    Parameters:
35
36        filename: the name of the file/directory to test
37
38    """
39    return (os.path.isdir(filename)
40            and not os.path.exists(filename+'/table.f1')
41            and os.path.exists(filename+'/table.info'))
42
43
44class scantable(Scantable):
45    """\
46        The ASAP container for scans (single-dish data).
47    """
48
49    @asaplog_post_dec
50    def __init__(self, filename, average=None, unit=None, getpt=None,
51                 antenna=None, parallactify=None):
52        """\
53        Create a scantable from a saved one or make a reference
54
55        Parameters:
56
57            filename:     the name of an asap table on disk
58                          or
59                          the name of a rpfits/sdfits/ms file
60                          (integrations within scans are auto averaged
61                          and the whole file is read) or
62                          [advanced] a reference to an existing scantable
63
64            average:      average all integrations within a scan on read.
65                          The default (True) is taken from .asaprc.
66
67            unit:         brightness unit; must be consistent with K or Jy.
68                          Overrides the default selected by the filler
69                          (input rpfits/sdfits/ms) or replaces the value
70                          in existing scantables
71
72            getpt:        for MeasurementSet input data only:
73                          If True, all pointing data are filled.
74                          The default is True. Setting it to False can make
75                          loading the MS data faster in some cases.
76
77            antenna:      Antenna selection. integer (id) or string (name or id).
78
79            parallactify: Indicate that the data had been parallactified. Default
80                          is taken from rc file.
81
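        Example (a minimal usage sketch; the file name and option value are
        illustrative only)::

            s = scantable('2008-03-12_0025.rpf', average=True)
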
82        """
83        if average is None:
84            average = rcParams['scantable.autoaverage']
85        if getpt is None:
86            getpt = True
87        if antenna is not None:
88            asaplog.push("Antenna selection currently unsupported. "
89                         "Using '0'")
90            asaplog.post('WARN')
91        if antenna is None:
92            antenna = ''
93        elif type(antenna) == int:
94            antenna = '%s' % antenna
95        elif type(antenna) == list:
96            tmpstr = ''
97            for i in range( len(antenna) ):
98                if type(antenna[i]) == int:
99                    tmpstr = tmpstr + ('%s,'%(antenna[i]))
100                elif type(antenna[i]) == str:
101                    tmpstr=tmpstr+antenna[i]+','
102                else:
103                    raise TypeError('Bad antenna selection.')
104            antenna = tmpstr.rstrip(',')
105        parallactify = parallactify or rcParams['scantable.parallactify']
106        varlist = vars()
107        from asap._asap import stmath
108        self._math = stmath( rcParams['insitu'] )
109        if isinstance(filename, Scantable):
110            Scantable.__init__(self, filename)
111        else:
112            if isinstance(filename, str):
113                filename = os.path.expandvars(filename)
114                filename = os.path.expanduser(filename)
115                if not os.path.exists(filename):
116                    s = "File '%s' not found." % (filename)
117                    raise IOError(s)
118                if is_scantable(filename):
119                    ondisk = rcParams['scantable.storage'] == 'disk'
120                    Scantable.__init__(self, filename, ondisk)
121                    if unit is not None:
122                        self.set_fluxunit(unit)
123                    # do not reset to the default freqframe
124                    #self.set_freqframe(rcParams['scantable.freqframe'])
125                elif os.path.isdir(filename) \
126                         and not os.path.exists(filename+'/table.f1'):
127                    msg = "The given file '%s' is not a valid " \
128                          "asap table." % (filename)
129                    raise IOError(msg)
130                else:
131                    self._fill([filename], unit, average, getpt, antenna)
132            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
133                  and isinstance(filename[-1], str):
134                self._fill(filename, unit, average, getpt, antenna)
135        self.parallactify(parallactify)
136        self._add_history("scantable", varlist)
137
138    @asaplog_post_dec
139    def save(self, name=None, format=None, overwrite=False):
140        """\
141        Store the scantable on disk. This can be an asap (aips++) Table,
142        SDFITS or MS2 format.
143
144        Parameters:
145
146            name:        the name of the output file. For format "ASCII"
147                         this is the root file name (data in 'name'.txt
148                         and header in 'name'_header.txt)
149
150            format:      an optional file format. Default is ASAP.
151                         Allowed are:
152
153                            * 'ASAP' (save as ASAP [aips++] Table)
154                            * 'SDFITS' (save as SDFITS file)
155                            * 'ASCII' (save as ASCII text file)
156                            * 'MS2' (save as a casacore MeasurementSet V2)
157                            * 'FITS' (save as image FITS - not readable by class)
158                            * 'CLASS' (save as FITS readable by CLASS)
159
160            overwrite:   If True, overwrite the file if it exists.
161                         The default (False) is to return with a warning
162                         without writing the output. USE WITH CARE.
163
164        Example::
165
166            scan.save('myscan.asap')
167            scan.save('myscan.sdfits', 'SDFITS')
168
169        """
170        from os import path
171        format = format or rcParams['scantable.save']
172        suffix = '.'+format.lower()
173        if name is None or name == "":
174            name = 'scantable'+suffix
175            msg = "No filename given. Using default name %s..." % name
176            asaplog.push(msg)
177        name = path.expandvars(name)
178        if path.isfile(name) or path.isdir(name):
179            if not overwrite:
180                msg = "File %s exists." % name
181                raise IOError(msg)
182        format2 = format.upper()
183        if format2 == 'ASAP':
184            self._save(name)
185        else:
186            from asap._asap import stwriter as stw
187            writer = stw(format2)
188            writer.write(self, name)
189        return
190
191    def copy(self):
192        """Return a copy of this scantable.
193
194        *Note*:
195
196            This makes a full (deep) copy. scan2 = scan1 makes a reference.
197
198        Example::
199
200            copiedscan = scan.copy()
201
202        """
203        sd = scantable(Scantable._copy(self))
204        return sd
205
206    def drop_scan(self, scanid=None):
207        """\
208        Return a new scantable where the specified scan number(s) has(have)
209        been dropped.
210
211        Parameters:
212
213            scanid:    a (list of) scan number(s)
214
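        Example (illustrative; the scan numbers are arbitrary)::

            # drop scans 2 and 3, keeping all other scans
            newscan = scan.drop_scan([2, 3])
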
215        """
216        from asap import _is_sequence_or_number as _is_valid
217        from asap import _to_list
218        from asap import unique
219        if not _is_valid(scanid):
220            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
221        scanid = _to_list(scanid)
222        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
223        for sid in scanid: allscans.remove(sid)
224        if len(allscans) == 0:
225            raise ValueError("Can't remove all scans")
226        sel = selector(scans=allscans)
227        return self._select_copy(sel)
228
229    def _select_copy(self, selection):
230        orig = self.get_selection()
231        self.set_selection(orig+selection)
232        cp = self.copy()
233        self.set_selection(orig)
234        return cp
235
236    def get_scan(self, scanid=None):
237        """\
238        Return a specific scan (by scanno) or collection of scans (by
239        source name) in a new scantable.
240
241        *Note*:
242
243            See scantable.drop_scan() for the inverse operation.
244
245        Parameters:
246
247            scanid:    a (list of) scanno or a source name, unix-style
248                       patterns are accepted for source name matching, e.g.
249                       '*_R' gets all 'ref' scans
250
251        Example::
252
253            # get all scans containing the source '323p459'
254            newscan = scan.get_scan('323p459')
255            # get all 'off' scans
256            refscans = scan.get_scan('*_R')
257            # get a subset of scans by scanno (as listed in scan.summary())
258            newscan = scan.get_scan([0, 2, 7, 10])
259
260        """
261        if scanid is None:
262            raise RuntimeError( 'Please specify a scan no or name to '
263                                'retrieve from the scantable' )
264        try:
265            bsel = self.get_selection()
266            sel = selector()
267            if type(scanid) is str:
268                sel.set_name(scanid)
269                return self._select_copy(sel)
270            elif type(scanid) is int:
271                sel.set_scans([scanid])
272                return self._select_copy(sel)
273            elif type(scanid) is list:
274                sel.set_scans(scanid)
275                return self._select_copy(sel)
276            else:
277                msg = "Illegal scanid type, use 'int' or 'list' of ints."
278                raise TypeError(msg)
279        except RuntimeError:
280            raise
281
282    def __str__(self):
283        return Scantable._summary(self, True)
284
285    def summary(self, filename=None):
286        """\
287        Print a summary of the contents of this scantable.
288
289        Parameters:
290
291            filename:    the name of a file to write the output to
292                         Default - no file output
293
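        Example (illustrative; the file name is arbitrary)::

            scan.summary()                    # page the summary to screen/log
            scan.summary('obs_summary.txt')   # also write it to a text file
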
294        """
295        info = Scantable._summary(self, True)
296        if filename is not None:
297            if filename == "":
298                filename = 'scantable_summary.txt'
299            from os.path import expandvars, isdir
300            filename = expandvars(filename)
301            if not isdir(filename):
302                data = open(filename, 'w')
303                data.write(info)
304                data.close()
305            else:
306                msg = "Illegal file name '%s'." % (filename)
307                raise IOError(msg)
308        return page(info)
309
310    def get_spectrum(self, rowno):
311        """Return the spectrum for the current row in the scantable as a list.
312
313        Parameters:
314
315             rowno:   the row number to retrieve the spectrum from
316
317        """
318        return self._getspectrum(rowno)
319
320    def get_mask(self, rowno):
321        """Return the mask for the current row in the scantable as a list.
322
323        Parameters:
324
325             rowno:   the row number to retrieve the mask from
326
327        """
328        return self._getmask(rowno)
329
330    def set_spectrum(self, spec, rowno):
331        """Return the spectrum for the current row in the scantable as a list.
332
333        Parameters:
334
335             spec:   the new spectrum
336
337             rowno:  the row number to set the spectrum for
338
339        """
340        assert(len(spec) == self.nchan())
341        return self._setspectrum(spec, rowno)
342
343    def get_coordinate(self, rowno):
344        """Return the (spectral) coordinate for a a given 'rowno'.
345
346        *Note*:
347
348            * This coordinate is only valid until a scantable method modifies
349              the frequency axis.
350            * This coordinate contains the original frequency set-up,
351              NOT the new frame. The conversions, however, are done using the
352              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
353              use scantable.freq_align first. Without it there is no closure,
354              i.e.::
355
356                  c = myscan.get_coordinate(0)
357                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
358
359        Parameters:
360
361             rowno:    the row number for the spectral coordinate
362
363        """
364        return coordinate(Scantable.get_coordinate(self, rowno))
365
366    def get_selection(self):
367        """\
368        Get the selection object currently set on this scantable.
369
370        Example::
371
372            sel = scan.get_selection()
373            sel.set_ifs(0)              # select IF 0
374            scan.set_selection(sel)     # apply modified selection
375
376        """
377        return selector(self._getselection())
378
379    def set_selection(self, selection=None, **kw):
380        """\
381        Select a subset of the data. All following operations on this scantable
382        are only applied to this selection.
383
384        Parameters:
385
386            selection:    a selector object (default unset the selection), or
387                          any combination of "pols", "ifs", "beams", "scans",
388                          "cycles", "name", "query"
389
390        Examples::
391
392            sel = selector()         # create a selection object
393            sel.set_scans([0, 3])    # select SCANNO 0 and 3
394            scan.set_selection(sel)  # set the selection
395            scan.summary()           # will only print summary of scanno 0 and 3
396            scan.set_selection()     # unset the selection
397            # or the equivalent
398            scan.set_selection(scans=[0,3])
399            scan.summary()           # will only print summary of scanno 0 and 3
400            scan.set_selection()     # unset the selection
401
402        """
403        if selection is None:
404            # reset
405            if len(kw) == 0:
406                selection = selector()
407            else:
408                # try keywords
409                for k in kw:
410                    if k not in selector.fields:
411                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
412                selection = selector(**kw)
413        self._setselection(selection)
414
415    def get_row(self, row=0, insitu=None):
416        """\
417        Select a row in the scantable.
418        Return a scantable with single row.
419
420        Parameters:
421
422            row:    row no of integration, default is 0.
423            insitu: if False a new scantable is returned. Otherwise, the
424                    selection is done in-situ. The default is taken from .asaprc
425                    (False)
426
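        Example (illustrative; assumes the scantable has at least 6 rows)::

            # extract row 5 into a new single-row scantable
            row5 = scan.get_row(5, insitu=False)
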
427        """
428        if insitu is None: insitu = rcParams['insitu']
429        if not insitu:
430            workscan = self.copy()
431        else:
432            workscan = self
433        # Select a row
434        sel=selector()
435        sel.set_scans([workscan.getscan(row)])
436        sel.set_cycles([workscan.getcycle(row)])
437        sel.set_beams([workscan.getbeam(row)])
438        sel.set_ifs([workscan.getif(row)])
439        sel.set_polarisations([workscan.getpol(row)])
440        sel.set_name(workscan._getsourcename(row))
441        workscan.set_selection(sel)
442        if not workscan.nrow() == 1:
443            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
444            raise RuntimeError(msg)
445        del sel
446        if insitu:
447            self._assign(workscan)
448        else:
449            return workscan
450
451    @asaplog_post_dec
452    def stats(self, stat='stddev', mask=None, form='3.3f'):
453        """\
454        Determine the specified statistic of the current beam/if/pol
455        Takes a 'mask' as an optional parameter to specify which
456        channels should be excluded.
457
458        Parameters:
459
460            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
461                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
462
463            mask:    an optional mask specifying where the statistic
464                     should be determined.
465
466            form:    format string to print statistic values
467
468        Example::
469
470            scan.set_unit('channel')
471            msk = scan.create_mask([100, 200], [500, 600])
472            scan.stats(stat='mean', mask=msk)
473
474        """
475        mask = mask or []
476        if not self._check_ifs():
477            raise ValueError("Cannot apply mask as the IFs have different "
478                             "number of channels. Please use set_selection() "
479                             "to select individual IFs")
480        rtnabc = False
481        if stat.lower().endswith('_abc'): rtnabc = True
482        getchan = False
483        if stat.lower().startswith('min') or stat.lower().startswith('max'):
484            chan = self._math._minmaxchan(self, mask, stat)
485            getchan = True
486            statvals = []
487        if not rtnabc: statvals = self._math._stats(self, mask, stat)
488
489        #def cb(i):
490        #    return statvals[i]
491
492        #return self._row_callback(cb, stat)
493
494        label=stat
495        #callback=cb
496        out = ""
497        #outvec = []
498        sep = '-'*50
499        for i in range(self.nrow()):
500            refstr = ''
501            statunit= ''
502            if getchan:
503                qx, qy = self.chan2data(rowno=i, chan=chan[i])
504                if rtnabc:
505                    statvals.append(qx['value'])
506                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
507                    statunit= '['+qx['unit']+']'
508                else:
509                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
510
511            tm = self._gettime(i)
512            src = self._getsourcename(i)
513            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
514            out += 'Time[%s]:\n' % (tm)
515            if self.nbeam(-1) > 1:
516                out +=  ' Beam[%d] ' % (self.getbeam(i))
517            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
518            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
519            #outvec.append(callback(i))
520            #out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
521            out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
522            out +=  sep+"\n"
523
524
525        import os
526        if os.environ.has_key( 'USER' ):
527            usr = os.environ['USER']
528        else:
529            import commands
530            usr = commands.getoutput( 'whoami' )
531        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
532        f = open(tmpfile,'w')
533        print >> f, sep
534        print >> f, ' %s %s' % (label, statunit)
535        print >> f, sep
536        print >> f, out
537        f.close()
538        f = open(tmpfile,'r')
539        x = f.readlines()
540        f.close()
541        asaplog.push(''.join(x), False)
542
543        return statvals
544
545    def chan2data(self, rowno=0, chan=0):
546        """\
547        Returns channel/frequency/velocity and spectral value
548        at an arbitrary row and channel in the scantable.
549
550        Parameters:
551
552            rowno:   a row number in the scantable. Default is the
553                     first row, i.e. rowno=0
554
555            chan:    a channel in the scantable. Default is the first
556                     channel, i.e. chan=0
557
558        """
559        if isinstance(rowno, int) and isinstance(chan, int):
560            qx = {'unit': self.get_unit(),
561                  'value': self._getabcissa(rowno)[chan]}
562            qy = {'unit': self.get_fluxunit(),
563                  'value': self._getspectrum(rowno)[chan]}
564            return qx, qy
565
566    def stddev(self, mask=None):
567        """\
568        Determine the standard deviation of the current beam/if/pol
569        Takes a 'mask' as an optional parameter to specify which
570        channels should be excluded.
571
572        Parameters:
573
574            mask:    an optional mask specifying where the standard
575                     deviation should be determined.
576
577        Example::
578
579            scan.set_unit('channel')
580            msk = scan.create_mask([100, 200], [500, 600])
581            scan.stddev(mask=msk)
582
583        """
584        return self.stats(stat='stddev', mask=mask)
585
586
587    def get_column_names(self):
588        """\
589        Return a list of column names, which can be used for selection.
590        """
591        return list(Scantable.get_column_names(self))
592
593    def get_tsys(self, row=-1):
594        """\
595        Return the System temperatures.
596
597        Parameters:
598
599            row:    the rowno to get the information for. (default all rows)
600
601        Returns:
602
603            a list of Tsys values for the current selection
604
605        """
606        if row > -1:
607            return self._get_column(self._gettsys, row)
608        return self._row_callback(self._gettsys, "Tsys")
609
610
611    def get_weather(self, row=-1):
612        """\
613        Return the weather information.
614
615        Parameters:
616
617            row:    the rowno to get the information for. (default all rows)
618
619        Returns:
620
621            a dict or list of dicts of values for the current selection
622
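        Example (an illustrative sketch of accessing the returned values)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure'], w['humidity']
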
623        """
624
625        values = self._get_column(self._get_weather, row)
626        if row > -1:
627            return {'temperature': values[0],
628                    'pressure': values[1], 'humidity' : values[2],
629                    'windspeed' : values[3], 'windaz' : values[4]
630                    }
631        else:
632            out = []
633            for r in values:
634
635                out.append({'temperature': r[0],
636                            'pressure': r[1], 'humidity' : r[2],
637                            'windspeed' : r[3], 'windaz' : r[4]
638                    })
639            return out
640
641    def _row_callback(self, callback, label):
642        out = ""
643        outvec = []
644        sep = '-'*50
645        for i in range(self.nrow()):
646            tm = self._gettime(i)
647            src = self._getsourcename(i)
648            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
649            out += 'Time[%s]:\n' % (tm)
650            if self.nbeam(-1) > 1:
651                out +=  ' Beam[%d] ' % (self.getbeam(i))
652            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
653            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
654            outvec.append(callback(i))
655            out += '= %3.3f\n' % (outvec[i])
656            out +=  sep+'\n'
657
658        asaplog.push(sep)
659        asaplog.push(" %s" % (label))
660        asaplog.push(sep)
661        asaplog.push(out)
662        asaplog.post()
663        return outvec
664
665    def _get_column(self, callback, row=-1):
666        """
667        """
668        if row == -1:
669            return [callback(i) for i in range(self.nrow())]
670        else:
671            if  0 <= row < self.nrow():
672                return callback(row)
673
674
675    def get_time(self, row=-1, asdatetime=False):
676        """\
677        Get a list of time stamps for the observations.
678        Return a datetime object for each integration time stamp in the scantable.
679
680        Parameters:
681
682            row:          row no of integration. Default -1 return all rows
683
684            asdatetime:   return values as datetime objects rather than strings
685
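        Example (illustrative)::

            t0 = scan.get_time(row=0, asdatetime=True)   # a datetime object
            alltimes = scan.get_time()                    # list of strings
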
686        """
687        from time import strptime
688        from datetime import datetime
689        times = self._get_column(self._gettime, row)
690        if not asdatetime:
691            return times
692        format = "%Y/%m/%d/%H:%M:%S"
693        if isinstance(times, list):
694            return [datetime(*strptime(i, format)[:6]) for i in times]
695        else:
696            return datetime(*strptime(times, format)[:6])
697
698
699    def get_inttime(self, row=-1):
700        """\
701        Get a list of integration times for the observations.
702        Return a time in seconds for each integration in the scantable.
703
704        Parameters:
705
706            row:    row no of integration. Default -1 return all rows.
707
708        """
709        return self._get_column(self._getinttime, row)
710
711
712    def get_sourcename(self, row=-1):
713        """\
714        Get a list of source names for the observations.
715        Return a string for each integration in the scantable.
716        Parameters:
717
718            row:    row no of integration. Default -1 return all rows.
719
720        """
721        return self._get_column(self._getsourcename, row)
722
723    def get_elevation(self, row=-1):
724        """\
725        Get a list of elevations for the observations.
726        Return a float for each integration in the scantable.
727
728        Parameters:
729
730            row:    row no of integration. Default -1 return all rows.
731
732        """
733        return self._get_column(self._getelevation, row)
734
735    def get_azimuth(self, row=-1):
736        """\
737        Get a list of azimuths for the observations.
738        Return a float for each integration in the scantable.
739
740        Parameters:
741            row:    row no of integration. Default -1 return all rows.
742
743        """
744        return self._get_column(self._getazimuth, row)
745
746    def get_parangle(self, row=-1):
747        """\
748        Get a list of parallactic angles for the observations.
749        Return a float for each integration in the scantable.
750
751        Parameters:
752
753            row:    row no of integration. Default -1 return all rows.
754
755        """
756        return self._get_column(self._getparangle, row)
757
758    def get_direction(self, row=-1):
759        """
760        Get a list of Positions on the sky (direction) for the observations.
761        Return a string for each integration in the scantable.
762
763        Parameters:
764
765            row:    row no of integration. Default -1 return all rows
766
767        """
768        return self._get_column(self._getdirection, row)
769
770    def get_directionval(self, row=-1):
771        """\
772        Get a list of Positions on the sky (direction) for the observations.
773        Return a float for each integration in the scantable.
774
775        Parameters:
776
777            row:    row no of integration. Default -1 return all rows
778
779        """
780        return self._get_column(self._getdirectionvec, row)
781
782    @asaplog_post_dec
783    def set_unit(self, unit='channel'):
784        """\
785        Set the unit for all following operations on this scantable
786
787        Parameters:
788
789            unit:    optional unit, default is 'channel'. Use one of '*Hz',
790                     'km/s', 'channel' or equivalent ''
791
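        Example (illustrative)::

            scan.set_unit('km/s')    # work in velocity
            scan.set_unit('GHz')     # or in frequency
            scan.set_unit()          # back to channels
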
792        """
793        varlist = vars()
794        if unit in ['', 'pixel', 'channel']:
795            unit = ''
796        inf = list(self._getcoordinfo())
797        inf[0] = unit
798        self._setcoordinfo(inf)
799        self._add_history("set_unit", varlist)
800
801    @asaplog_post_dec
802    def set_instrument(self, instr):
803        """\
804        Set the instrument for subsequent processing.
805
806        Parameters:
807
808            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
809                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
810
811        """
812        self._setInstrument(instr)
813        self._add_history("set_instument", vars())
814
815    @asaplog_post_dec
816    def set_feedtype(self, feedtype):
817        """\
818        Overwrite the feed type, which might not be set correctly.
819
820        Parameters:
821
822            feedtype:     'linear' or 'circular'
823
824        """
825        self._setfeedtype(feedtype)
826        self._add_history("set_feedtype", vars())
827
828    @asaplog_post_dec
829    def set_doppler(self, doppler='RADIO'):
830        """\
831        Set the doppler for all following operations on this scantable.
832
833        Parameters:
834
835            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
836
837        """
838        varlist = vars()
839        inf = list(self._getcoordinfo())
840        inf[2] = doppler
841        self._setcoordinfo(inf)
842        self._add_history("set_doppler", vars())
843
844    @asaplog_post_dec
845    def set_freqframe(self, frame=None):
846        """\
847        Set the frame type of the Spectral Axis.
848
849        Parameters:
850
851            frame:   an optional frame type, default 'LSRK'. Valid frames are:
852                     'TOPO', 'LSRD', 'LSRK', 'BARY',
853                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
854
855        Example::
856
857            scan.set_freqframe('BARY')
858
859        """
860        frame = frame or rcParams['scantable.freqframe']
861        varlist = vars()
862        # "REST" is not implemented in casacore
863        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
864        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
865        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
866                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
867
868        if frame in valid:
869            inf = list(self._getcoordinfo())
870            inf[1] = frame
871            self._setcoordinfo(inf)
872            self._add_history("set_freqframe", varlist)
873        else:
874            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
875            raise TypeError(msg)
876
877    @asaplog_post_dec
878    def set_dirframe(self, frame=""):
879        """\
880        Set the frame type of the Direction on the sky.
881
882        Parameters:
883
884            frame:   an optional frame type, default ''. Valid frames are:
885                     'J2000', 'B1950', 'GALACTIC'
886
887        Example::
888
889            scan.set_dirframe('GALACTIC')
890
891        """
892        varlist = vars()
893        Scantable.set_dirframe(self, frame)
894        self._add_history("set_dirframe", varlist)
895
896    def get_unit(self):
897        """\
898        Get the default unit set in this scantable
899
900        Returns:
901
902            A unit string
903
904        """
905        inf = self._getcoordinfo()
906        unit = inf[0]
907        if unit == '': unit = 'channel'
908        return unit
909
910    @asaplog_post_dec
911    def get_abcissa(self, rowno=0):
912        """\
913        Get the abcissa in the current coordinate setup for the currently
914        selected Beam/IF/Pol
915
916        Parameters:
917
918            rowno:    an optional row number in the scantable. Default is the
919                      first row, i.e. rowno=0
920
921        Returns:
922
923            The abcissa values and the format string (as a dictionary)
924
925        """
926        abc = self._getabcissa(rowno)
927        lbl = self._getabcissalabel(rowno)
928        return abc, lbl
929
930    @asaplog_post_dec
931    def flag(self, mask=None, unflag=False):
932        """\
933        Flag the selected data using an optional channel mask.
934
935        Parameters:
936
937            mask:   an optional channel mask, created with create_mask. Default
938                    (no mask) is all channels.
939
940            unflag:    if True, unflag the data
941
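        Example (an illustrative sketch; the channel ranges are arbitrary)::

            # flag two channel windows in the current selection
            msk = scan.create_mask([0, 100], [900, 1023])
            scan.flag(mask=msk)
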
942        """
943        varlist = vars()
944        mask = mask or []
945        self._flag(mask, unflag)
946        self._add_history("flag", varlist)
947
948    @asaplog_post_dec
949    def flag_row(self, rows=[], unflag=False):
950        """\
951        Flag the selected data in row-based manner.
952
953        Parameters:
954
955            rows:   list of row numbers to be flagged. Default is no row
956                    (must be explicitly specified to execute row-based flagging).
957
958            unflag: if True, unflag the data.
959
960        """
961        varlist = vars()
962        self._flag_row(rows, unflag)
963        self._add_history("flag_row", varlist)
964
965    @asaplog_post_dec
966    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
967        """\
968        Flag the selected data outside a specified range (channel-based).
969
970        Parameters:
971
972            uthres:      upper threshold.
973
974            dthres:      lower threshold
975
976            clipoutside: True for flagging data outside the range [dthres:uthres].
977                         False for flagging data inside the range.
978
979            unflag:      if True, unflag the data.
980
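        Example (illustrative; the thresholds are arbitrary)::

            # flag everything outside the range [-1.0, 5.0]
            scan.clip(uthres=5.0, dthres=-1.0)
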
981        """
982        varlist = vars()
983        self._clip(uthres, dthres, clipoutside, unflag)
984        self._add_history("clip", varlist)
985
986    @asaplog_post_dec
987    def lag_flag(self, start, end, unit="MHz", insitu=None):
988        """\
989        Flag the data in 'lag' space by providing a frequency to remove.
990        Flagged data in the scantable gets interpolated over the region.
991        No taper is applied.
992
993        Parameters:
994
995            start:    the start frequency (really a period within the
996                      bandwidth) or period to remove
997
998            end:      the end frequency or period to remove
999
1000            unit:     the frequency unit (default "MHz") or "" for
1001                      explicit lag channels
1002
1003        *Notes*:
1004
1005            It is recommended to flag edges of the band or strong
1006            signals beforehand.
1007
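        Example (an illustrative sketch; the frequency values are arbitrary)::

            # interpolate over lag-space features corresponding to
            # 1.2-1.3 MHz, e.g. a standing-wave ripple
            cleaned = scan.lag_flag(1.2, 1.3, unit="MHz", insitu=False)
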
1008        """
1009        if insitu is None: insitu = rcParams['insitu']
1010        self._math._setinsitu(insitu)
1011        varlist = vars()
1012        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1013        if not (unit == "" or base.has_key(unit)):
1014            raise ValueError("%s is not a valid unit." % unit)
1015        if unit == "":
1016            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1017        else:
1018            s = scantable(self._math._lag_flag(self, start*base[unit],
1019                                               end*base[unit], "frequency"))
1020        s._add_history("lag_flag", varlist)
1021        if insitu:
1022            self._assign(s)
1023        else:
1024            return s
1025
1026    @asaplog_post_dec
1027    def create_mask(self, *args, **kwargs):
1028        """\
1029        Compute and return a mask based on [min, max] windows.
1030        The specified windows are to be INCLUDED, when the mask is
1031        applied.
1032
1033        Parameters:
1034
1035            [min, max], [min2, max2], ...
1036                Pairs of start/end points (inclusive) specifying the regions
1037                to be masked
1038
1039            invert:     optional argument. If specified as True,
1040                        return an inverted mask, i.e. the regions
1041                        specified are EXCLUDED
1042
1043            row:        create the mask using the specified row for
1044                        unit conversions, default is row=0
1045                        only necessary if frequency varies over rows.
1046
1047        Examples::
1048
1049            scan.set_unit('channel')
1050            # a)
1051            msk = scan.create_mask([400, 500], [800, 900])
1052            # masks everything outside 400 and 500
1053            # and 800 and 900 in the unit 'channel'
1054
1055            # b)
1056            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1057            # masks the regions between 400 and 500
1058            # and 800 and 900 in the unit 'channel'
1059
1060            # c)
1061            #mask only channel 400
1062            msk =  scan.create_mask([400])
1063
1064        """
1065        row = kwargs.get("row", 0)
1066        data = self._getabcissa(row)
1067        u = self._getcoordinfo()[0]
1068        if u == "":
1069            u = "channel"
1070        msg = "The current mask window unit is %s" % u
1071        i = self._check_ifs()
1072        if not i:
1073            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1074        asaplog.push(msg)
1075        n = self.nchan()
1076        msk = _n_bools(n, False)
1077        # test if args is a 'list' or a 'normal *args - UGLY!!!
1078
1079        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1080             and args or args[0]
1081        for window in ws:
1082            if len(window) == 1:
1083                window = [window[0], window[0]]
1084            if len(window) == 0 or  len(window) > 2:
1085                raise ValueError("A window needs to be defined as [start(, end)]")
1086            if window[0] > window[1]:
1087                tmp = window[0]
1088                window[0] = window[1]
1089                window[1] = tmp
1090            for i in range(n):
1091                if data[i] >= window[0] and data[i] <= window[1]:
1092                    msk[i] = True
1093        if kwargs.has_key('invert'):
1094            if kwargs.get('invert'):
1095                msk = mask_not(msk)
1096        return msk
1097
1098    def get_masklist(self, mask=None, row=0):
1099        """\
1100        Compute and return a list of mask windows, [min, max].
1101
1102        Parameters:
1103
1104            mask:       channel mask, created with create_mask.
1105
1106            row:        calculate the masklist using the specified row
1107                        for unit conversions, default is row=0
1108                        only necessary if frequency varies over rows.
1109
1110        Returns:
1111
1112            [min, max], [min2, max2], ...
1113                Pairs of start/end points (inclusive) specifying
1114                the masked regions
1115
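        Example (illustrative; the channel ranges are arbitrary)::

            scan.set_unit('channel')
            msk = scan.create_mask([400, 500], [800, 900])
            print scan.get_masklist(msk)   # e.g. [[400.0, 500.0], [800.0, 900.0]]
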
1116        """
1117        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1118            raise TypeError("The mask should be list or tuple.")
1119        if len(mask) < 2:
1120            raise TypeError("The mask should contain at least 2 elements")
1121        if self.nchan() != len(mask):
1122            msg = "Number of channels in scantable != number of mask elements"
1123            raise TypeError(msg)
1124        data = self._getabcissa(row)
1125        u = self._getcoordinfo()[0]
1126        if u == "":
1127            u = "channel"
1128        msg = "The current mask window unit is %s" % u
1129        i = self._check_ifs()
1130        if not i:
1131            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1132        asaplog.push(msg)
1133        masklist=[]
1134        ist, ien = None, None
1135        ist, ien=self.get_mask_indices(mask)
1136        if ist is not None and ien is not None:
1137            for i in xrange(len(ist)):
1138                limits = [data[ist[i]], data[ien[i]]]  # avoid shadowing builtin 'range'
1139                limits.sort()
1140                masklist.append([limits[0], limits[1]])
1141        return masklist
1142
1143    def get_mask_indices(self, mask=None):
1144        """\
1145        Compute and Return lists of mask start indices and mask end indices.
1146
1147        Parameters:
1148
1149            mask:       channel mask, created with create_mask.
1150
1151        Returns:
1152
1153            List of mask start indices and that of mask end indices,
1154            i.e., [istart1,istart2,....], [iend1,iend2,....].
1155
1156        """
1157        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1158            raise TypeError("The mask should be list or tuple.")
1159        if len(mask) < 2:
1160            raise TypeError("The mask should contain at least 2 elements")
1161        istart=[]
1162        iend=[]
1163        if mask[0]: istart.append(0)
1164        for i in range(len(mask)-1):
1165            if not mask[i] and mask[i+1]:
1166                istart.append(i+1)
1167            elif mask[i] and not mask[i+1]:
1168                iend.append(i)
1169        if mask[len(mask)-1]: iend.append(len(mask)-1)
1170        if len(istart) != len(iend):
1171            raise RuntimeError("Numbers of mask start != mask end.")
1172        for i in range(len(istart)):
1173            if istart[i] > iend[i]:
1174                raise RuntimeError("Mask start index > mask end index")
1176        return istart,iend
1177
1178#    def get_restfreqs(self):
1179#        """
1180#        Get the restfrequency(s) stored in this scantable.
1181#        The return value(s) are always of unit 'Hz'
1182#        Parameters:
1183#            none
1184#        Returns:
1185#            a list of doubles
1186#        """
1187#        return list(self._getrestfreqs())
1188
1189    def get_restfreqs(self, ids=None):
1190        """\
1191        Get the restfrequency(s) stored in this scantable.
1192        The return value(s) are always of unit 'Hz'
1193
1194        Parameters:
1195
1196            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1197                 be retrieved
1198
1199        Returns:
1200
1201            dictionary containing ids and a list of doubles for each id
1202
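        Example (illustrative; the returned values depend on the data)::

            rfs = scan.get_restfreqs()     # dict keyed by MOLECULE_ID
            rf0 = scan.get_restfreqs(0)    # rest frequencies for MOLECULE_ID 0
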
1203        """
1204        if ids is None:
1205            rfreqs={}
1206            idlist = self.getmolnos()
1207            for i in idlist:
1208                rfreqs[i]=list(self._getrestfreqs(i))
1209            return rfreqs
1210        else:
1211            if type(ids)==list or type(ids)==tuple:
1212                rfreqs={}
1213                for i in ids:
1214                    rfreqs[i]=list(self._getrestfreqs(i))
1215                return rfreqs
1216            else:
1217                return list(self._getrestfreqs(ids))
1218            #return list(self._getrestfreqs(ids))
1219
1220    def set_restfreqs(self, freqs=None, unit='Hz'):
1221        """\
1222        Set or replace the restfrequency.
1223        If the 'freqs' argument holds a scalar,
1224        then that rest frequency will be applied to all the selected
1225        data. If the 'freqs' argument holds
1226        a vector, then it MUST be of equal or smaller length than
1227        the number of IFs (and the available restfrequencies will be
1228        replaced by this vector).  In this case, *all* data have
1229        the restfrequency set per IF according
1230        to the corresponding value you give in the 'freqs' vector.
1231        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1232        IF 1 gets restfreq 2e9.
1233
1234        You can also specify the frequencies via a linecatalog.
1235
1236        Parameters:
1237
1238            freqs:   list of rest frequency values or string identifiers
1239
1240            unit:    unit for rest frequency (default 'Hz')
1241
1242
1243        Example::
1244
1245            # set the given restfrequency for the all currently selected IFs
1246            scan.set_restfreqs(freqs=1.4e9)
1247            # set restfrequencies for the n IFs  (n > 1) in the order of the
1248            # list, i.e
1249            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1250            # len(list_of_restfreqs) == nIF
1251            # for nIF == 1 the following will set multiple restfrequencies for
1252            # that IF
1253            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1254            # set multiple restfrequencies per IF, as a list of lists where
1255            # the outer list has nIF elements, the inner lists have arbitrary length
1256            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1257
1258       *Note*:
1259
1260            To do more sophisticated restfrequency setting, e.g. on a
1261            source and IF basis, use scantable.set_selection() before using
1262            this function::
1263
1264                # provided your scantable is called scan
1265                selection = selector()
1266                selection.set_name("ORION*")
1267                selection.set_ifs([1])
1268                scan.set_selection(selection)
1269                scan.set_restfreqs(freqs=86.6e9)
1270
1271        """
1272        varlist = vars()
1273        from asap import linecatalog
1274        # simple  value
1275        if isinstance(freqs, int) or isinstance(freqs, float):
1276            self._setrestfreqs([freqs], [""], unit)
1277        # list of values
1278        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1279            # list values are scalars
1280            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1281                if len(freqs) == 1:
1282                    self._setrestfreqs(freqs, [""], unit)
1283                else:
1284                    # allow the 'old' mode of setting multiple IFs
1285                    sel = selector()
1286                    savesel = self._getselection()
1287                    iflist = self.getifnos()
1288                    if len(freqs)>len(iflist):
1289                        raise ValueError("number of elements in the list "
1290                                         "exceeds the current IF selections")
1291                    iflist = self.getifnos()
1292                    for i, fval in enumerate(freqs):
1293                        sel.set_ifs(iflist[i])
1294                        self._setselection(sel)
1295                        self._setrestfreqs([fval], [""], unit)
1296                    self._setselection(savesel)
1297
1298            # list values are dict, {'value'=, 'name'=)
1299            elif isinstance(freqs[-1], dict):
1300                values = []
1301                names = []
1302                for d in freqs:
1303                    values.append(d["value"])
1304                    names.append(d["name"])
1305                self._setrestfreqs(values, names, unit)
1306            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1307                sel = selector()
1308                savesel = self._getselection()
1309                iflist = self.getifnos()
1310                if len(freqs)>len(iflist):
1311                    raise ValueError("number of elements in list of lists exceeds"
1312                                     " the current IF selections")
1313                for i, fval in enumerate(freqs):
1314                    sel.set_ifs(iflist[i])
1315                    self._setselection(sel)
1316                    self._setrestfreqs(fval, [""], unit)
1317                self._setselection(savesel)
1318        # freqs are to be taken from a linecatalog
1319        elif isinstance(freqs, linecatalog):
1320            sel = selector()
1321            savesel = self._getselection()
1322            for i in xrange(freqs.nrow()):
1323                sel.set_ifs(self.getifnos()[i])
1324                self._setselection(sel)
1325                self._setrestfreqs([freqs.get_frequency(i)],
1326                                   [freqs.get_name(i)], "MHz")
1327                # ensure that we are not iterating past nIF
1328                if i == self.nif()-1: break
1329            self._setselection(savesel)
1330        else:
1331            return
1332        self._add_history("set_restfreqs", varlist)
1333
1334    def shift_refpix(self, delta):
1335        """\
1336        Shift the reference pixel of the Spectral Coordinate by an
1337        integer amount.
1338
1339        Parameters:
1340
1341            delta:   the amount to shift by
1342
1343        *Note*:
1344
1345            Be careful using this with broadband data.
1346
1347        """
1348        Scantable.shift_refpix(self, delta)
1349
1350    @asaplog_post_dec
1351    def history(self, filename=None):
1352        """\
1353        Print the history. Optionally to a file.
1354
1355        Parameters:
1356
1357            filename:    The name of the file to save the history to.
1358
1359        """
1360        hist = list(self._gethistory())
1361        out = "-"*80
1362        for h in hist:
1363            if h.startswith("---"):
1364                out = "\n".join([out, h])
1365            else:
1366                items = h.split("##")
1367                date = items[0]
1368                func = items[1]
1369                items = items[2:]
1370                out += "\n"+date+"\n"
1371                out += "Function: %s\n  Parameters:" % (func)
1372                for i in items:
1373                    s = i.split("=")
1374                    out += "\n   %s = %s" % (s[0], s[1])
1375                out = "\n".join([out, "-"*80])
1376        if filename is not None:
1377            if filename == "":
1378                filename = 'scantable_history.txt'
1379            import os
1380            filename = os.path.expandvars(os.path.expanduser(filename))
1381            if not os.path.isdir(filename):
1382                data = open(filename, 'w')
1383                data.write(out)
1384                data.close()
1385            else:
1386                msg = "Illegal file name '%s'." % (filename)
1387                raise IOError(msg)
1388        return page(out)
1389    #
1390    # Maths business
1391    #
1392    @asaplog_post_dec
1393    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1394        """\
1395        Return the (time) weighted average of a scan.
1396
1397        *Note*:
1398
1399            in channels only - align if necessary
1400
1401        Parameters:
1402
1403            mask:     an optional mask (only used for 'var' and 'tsys'
1404                      weighting)
1405
1406            scanav:   True averages each scan separately
1407                      False (default) averages all scans together,
1408
1409            weight:   Weighting scheme.
1410                      'none'     (mean, no weighting)
1411                      'var'      (1/var(spec) weighted)
1412                      'tsys'     (1/Tsys**2 weighted)
1413                      'tint'     (integration time weighted)
1414                      'tintsys'  (Tint/Tsys**2)
1415                      'median'   (median averaging)
1416                      The default is 'tint'
1417
1418            align:    align the spectra in velocity before averaging. It takes
1419                      the time of the first spectrum as reference time.
1420
1421        Example::
1422
1423            # time average the scantable without using a mask
1424            newscan = scan.average_time()
1425
1426        """
1427        varlist = vars()
1428        weight = weight or 'TINT'
1429        mask = mask or ()
1430        scanav = (scanav and 'SCAN') or 'NONE'
1431        scan = (self, )
1432
1433        if align:
1434            scan = (self.freq_align(insitu=False), )
1435        s = None
1436        if weight.upper() == 'MEDIAN':
1437            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1438                                                     scanav))
1439        else:
1440            s = scantable(self._math._average(scan, mask, weight.upper(),
1441                          scanav))
1442        s._add_history("average_time", varlist)
1443        return s
1444
1445    @asaplog_post_dec
1446    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1447        """\
1448        Return a scan where all spectra are converted to either
1449        Jansky or Kelvin depending upon the flux units of the scan table.
1450        By default the function tries to look the values up internally.
1451        If it can't find them (or if you want to over-ride), you must
1452        specify EITHER jyperk OR eta (and D which it will try to look up
1453        also if you don't set it). jyperk takes precedence if you set both.
1454
1455        Parameters:
1456
1457            jyperk:      the Jy / K conversion factor
1458
1459            eta:         the aperture efficiency
1460
1461            d:           the geometric diameter (metres)
1462
1463            insitu:      if False a new scantable is returned.
1464                         Otherwise, the scaling is done in-situ
1465                         The default is taken from .asaprc (False)
1466
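        Example (illustrative; the Jy/K factor is arbitrary)::

            # convert using an explicit Jy/K factor, returning a new scantable
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
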
1467        """
1468        if insitu is None: insitu = rcParams['insitu']
1469        self._math._setinsitu(insitu)
1470        varlist = vars()
1471        jyperk = jyperk or -1.0
1472        d = d or -1.0
1473        eta = eta or -1.0
1474        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1475        s._add_history("convert_flux", varlist)
1476        if insitu: self._assign(s)
1477        else: return s
1478
1479    @asaplog_post_dec
1480    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1481        """\
1482        Return a scan after applying a gain-elevation correction.
1483        The correction can be made via either a polynomial or a
1484        table-based interpolation (and extrapolation if necessary).
1485        You specify polynomial coefficients, an ascii table or neither.
1486        If you specify neither, then a polynomial correction will be made
1487        with built in coefficients known for certain telescopes (an error
1488        will occur if the instrument is not known).
1489        The data and Tsys are *divided* by the scaling factors.
1490
1491        Parameters:
1492
1493            poly:        Polynomial coefficients (default None) to compute a
1494                         gain-elevation correction as a function of
1495                         elevation (in degrees).
1496
1497            filename:    The name of an ascii file holding correction factors.
1498                         The first row of the ascii file must give the column
1499                         names and these MUST include columns
1500                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1501                         by this) somewhere.
1502                         The second row must give the data type of the
1503                         column. Use 'R' for Real and 'I' for Integer.
1504                         An example file would be
1505                         (actual factors are arbitrary) :
1506
1507                         TIME ELEVATION FACTOR
1508                         R R R
1509                         0.1 0 0.8
1510                         0.2 20 0.85
1511                         0.3 40 0.9
1512                         0.4 60 0.85
1513                         0.5 80 0.8
1514                         0.6 90 0.75
1515
1516            method:      Interpolation method when correcting from a table.
1517                         Values are  "nearest", "linear" (default), "cubic"
1518                         and "spline"
1519
1520            insitu:      if False a new scantable is returned.
1521                         Otherwise, the scaling is done in-situ
1522                         The default is taken from .asaprc (False)
1523
1524        """
1525
1526        if insitu is None: insitu = rcParams['insitu']
1527        self._math._setinsitu(insitu)
1528        varlist = vars()
1529        poly = poly or ()
1530        from os.path import expandvars
1531        filename = expandvars(filename)
1532        s = scantable(self._math._gainel(self, poly, filename, method))
1533        s._add_history("gain_el", varlist)
1534        if insitu:
1535            self._assign(s)
1536        else:
1537            return s
1538
1539    @asaplog_post_dec
1540    def freq_align(self, reftime=None, method='cubic', insitu=None):
1541        """\
1542        Return a scan where all rows have been aligned in frequency/velocity.
1543        The alignment frequency frame (e.g. LSRK) is that set by function
1544        set_freqframe.
1545
1546        Parameters:
1547
1548            reftime:     reference time to align at. By default, the time of
1549                         the first row of data is used.
1550
1551            method:      Interpolation method for regridding the spectra.
1552                         Choose from "nearest", "linear", "cubic" (default)
1553                         and "spline"
1554
1555            insitu:      if False a new scantable is returned.
1556                         Otherwise, the scaling is done in-situ
1557                         The default is taken from .asaprc (False)
1558
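        Example (a minimal sketch; assumes the desired frame, e.g. LSRK, has
        already been set with set_freqframe)::

            # align all rows at the time of the first row using the default
            # cubic interpolation
            aligned = scan.freq_align(insitu=False)
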
1559        """
1560        if insitu is None: insitu = rcParams["insitu"]
1561        self._math._setinsitu(insitu)
1562        varlist = vars()
1563        reftime = reftime or ""
1564        s = scantable(self._math._freq_align(self, reftime, method))
1565        s._add_history("freq_align", varlist)
1566        if insitu: self._assign(s)
1567        else: return s
1568
1569    @asaplog_post_dec
1570    def opacity(self, tau=None, insitu=None):
1571        """\
1572        Apply an opacity correction. The data
1573        and Tsys are multiplied by the correction factor.
1574
1575        Parameters:
1576
1577            tau:         (list of) opacity from which the correction factor is
1578                         exp(tau*ZD)
1579                         where ZD is the zenith-distance.
1580                         If a list is provided, it has to be of length nIF,
1581                         nIF*nPol or 1 and in order of IF/POL, e.g.
1582                         [opif0pol0, opif0pol1, opif1pol0 ...]
1583                         If tau is `None`, the opacities are determined from a
1584                         model.
1585
1586            insitu:      if False a new scantable is returned.
1587                         Otherwise, the scaling is done in-situ
1588                         The default is taken from .asaprc (False)
1589
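        Example (the opacity values are placeholders; one value per IF in
        this sketch)::

            corrected = scan.opacity(tau=[0.083, 0.065], insitu=False)
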
1590        """
1591        if insitu is None: insitu = rcParams['insitu']
1592        self._math._setinsitu(insitu)
1593        varlist = vars()
1594        if not hasattr(tau, "__len__"):
1595            tau = [tau]
1596        s = scantable(self._math._opacity(self, tau))
1597        s._add_history("opacity", varlist)
1598        if insitu: self._assign(s)
1599        else: return s
1600
1601    @asaplog_post_dec
1602    def bin(self, width=5, insitu=None):
1603        """\
1604        Return a scan where all spectra have been binned up.
1605
1606        Parameters:
1607
1608            width:       The bin width (default=5) in pixels
1609
1610            insitu:      if False a new scantable is returned.
1611                         Otherwise, the scaling is done in-situ
1612                         The default is taken from .asaprc (False)
1613
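        Example (a minimal sketch; 'scan' is an existing scantable)::

            # bin up by 10 channels and return a new scantable
            binned = scan.bin(width=10, insitu=False)
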
1614        """
1615        if insitu is None: insitu = rcParams['insitu']
1616        self._math._setinsitu(insitu)
1617        varlist = vars()
1618        s = scantable(self._math._bin(self, width))
1619        s._add_history("bin", varlist)
1620        if insitu:
1621            self._assign(s)
1622        else:
1623            return s
1624
1625    @asaplog_post_dec
1626    def resample(self, width=5, method='cubic', insitu=None):
1627        """\
1628        Return a scan where all spectra have been resampled.
1629
1630        Parameters:
1631
1632            width:       The bin width (default=5) in pixels
1633
1634            method:      Interpolation method used for the resampling.
1635                         Values are "nearest", "linear", "cubic" (default)
1636                         and "spline"
1637
1638            insitu:      if False a new scantable is returned.
1639                         Otherwise, the scaling is done in-situ
1640                         The default is taken from .asaprc (False)
1641
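        Example (illustrative values only)::

            # resample with a width of 2 pixels using spline interpolation
            rs = scan.resample(width=2, method='spline', insitu=False)
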
1642        """
1643        if insitu is None: insitu = rcParams['insitu']
1644        self._math._setinsitu(insitu)
1645        varlist = vars()
1646        s = scantable(self._math._resample(self, method, width))
1647        s._add_history("resample", varlist)
1648        if insitu: self._assign(s)
1649        else: return s
1650
1651    @asaplog_post_dec
1652    def average_pol(self, mask=None, weight='none'):
1653        """\
1654        Average the Polarisations together.
1655
1656        Parameters:
1657
1658            mask:        An optional mask defining the region where the
1659                         averaging will be applied. The output will have all
1660                         specified points masked.
1661
1662            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1663                         weighted), or 'tsys' (1/Tsys**2 weighted)
1664
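        Example (a minimal sketch)::

            # average the polarisations using 1/Tsys**2 weighting
            ipol = scan.average_pol(weight='tsys')
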
1665        """
1666        varlist = vars()
1667        mask = mask or ()
1668        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1669        s._add_history("average_pol", varlist)
1670        return s
1671
1672    @asaplog_post_dec
1673    def average_beam(self, mask=None, weight='none'):
1674        """\
1675        Average the Beams together.
1676
1677        Parameters:
1678            mask:        An optional mask defining the region where the
1679                         averaging will be applied. The output will have all
1680                         specified points masked.
1681
1682            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1683                         weighted), or 'tsys' (1/Tsys**2 weighted)
1684
1685        """
1686        varlist = vars()
1687        mask = mask or ()
1688        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1689        s._add_history("average_beam", varlist)
1690        return s
1691
1692    def parallactify(self, pflag):
1693        """\
1694        Set a flag to indicate whether this data should be treated as having
1695        been 'parallactified' (total phase == 0.0)
1696
1697        Parameters:
1698
1699            pflag:  Bool indicating whether to turn this on (True) or
1700                    off (False)
1701
1702        """
1703        varlist = vars()
1704        self._parallactify(pflag)
1705        self._add_history("parallactify", varlist)
1706
1707    @asaplog_post_dec
1708    def convert_pol(self, poltype=None):
1709        """\
1710        Convert the data to a different polarisation type.
1711        Note that you will need cross-polarisation terms for most conversions.
1712
1713        Parameters:
1714
1715            poltype:    The new polarisation type. Valid types are:
1716                        "linear", "circular", "stokes" and "linpol"
1717
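        Example (a sketch; assumes the cross-polarisation products needed
        for the conversion are present)::

            stokes = scan.convert_pol('stokes')
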
1718        """
1719        varlist = vars()
1720        s = scantable(self._math._convertpol(self, poltype))
1721        s._add_history("convert_pol", varlist)
1722        return s
1723
1724    @asaplog_post_dec
1725    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1726        """\
1727        Smooth the spectrum by the specified kernel (conserving flux).
1728
1729        Parameters:
1730
1731            kernel:     The type of smoothing kernel. Select from
1732                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1733                        or 'poly'
1734
1735            width:      The width of the kernel in pixels. For 'hanning' this
1736                        is ignored, otherwise it defaults to 5 pixels.
1737                        For 'gaussian' it is the Full Width at Half
1738                        Maximum. For 'boxcar' it is the full width.
1739                        For 'rmedian' and 'poly' it is the half width.
1740
1741            order:      Optional parameter for the 'poly' kernel (default is
1742                        2), to specify the order of the polynomial. Ignored by
1743                        all other kernels.
1744
1745            plot:       plot the original and the smoothed spectra.
1746                        Each individual fit then has to be approved by
1747                        typing 'y' or 'n'
1748
1749            insitu:     if False a new scantable is returned.
1750                        Otherwise, the scaling is done in-situ
1751                        The default is taken from .asaprc (False)
1752
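        Example (the kernel width is a placeholder)::

            # gaussian smoothing with a FWHM of 10 channels,
            # returning a new scantable
            sm = scan.smooth(kernel='gaussian', width=10.0, insitu=False)
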
1753        """
1754        if insitu is None: insitu = rcParams['insitu']
1755        self._math._setinsitu(insitu)
1756        varlist = vars()
1757
1758        if plot: orgscan = self.copy()
1759
1760        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1761        s._add_history("smooth", varlist)
1762
1763        if plot:
1764            if rcParams['plotter.gui']:
1765                from asap.asaplotgui import asaplotgui as asaplot
1766            else:
1767                from asap.asaplot import asaplot
1768            self._p=asaplot()
1769            self._p.set_panels()
1770            ylab=s._get_ordinate_label()
1771            #self._p.palette(0,["#777777","red"])
1772            for r in xrange(s.nrow()):
1773                xsm=s._getabcissa(r)
1774                ysm=s._getspectrum(r)
1775                xorg=orgscan._getabcissa(r)
1776                yorg=orgscan._getspectrum(r)
1777                self._p.clear()
1778                self._p.hold()
1779                self._p.set_axes('ylabel',ylab)
1780                self._p.set_axes('xlabel',s._getabcissalabel(r))
1781                self._p.set_axes('title',s._getsourcename(r))
1782                self._p.set_line(label='Original',color="#777777")
1783                self._p.plot(xorg,yorg)
1784                self._p.set_line(label='Smoothed',color="red")
1785                self._p.plot(xsm,ysm)
1786                ### Ugly part for legend
1787                for i in [0,1]:
1788                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1789                self._p.release()
1790                ### Ugly part for legend
1791                self._p.subplots[0]['lines']=[]
1792                res = raw_input("Accept smoothing ([y]/n): ")
1793                if res.upper() == 'N':
1794                    s._setspectrum(yorg, r)
1795            self._p.unmap()
1796            self._p = None
1797            del orgscan
1798
1799        if insitu: self._assign(s)
1800        else: return s
1801
1802    @asaplog_post_dec
1803    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1804                      insitu=None):
1805        """\
1806        Return a scan which has been baselined (all rows) by a polynomial.
1807
1808        Parameters:
1809
1810            mask:       an optional mask
1811
1812            order:      the order of the polynomial (default is 0)
1813
1814            plot:       plot the fit and the residual. Each individual
1815                        fit has to be approved by typing 'y'
1816                        or 'n'
1817
1818            uselin:     use linear polynomial fit
1819
1820            insitu:     if False a new scantable is returned.
1821                        Otherwise, the scaling is done in-situ
1822                        The default is taken from .asaprc (False)
1823
1824        Example::
1825
1826            # return a scan baselined by a third order polynomial,
1827            # not using a mask
1828            bscan = scan.poly_baseline(order=3)
1829
1830        """
1831        if insitu is None: insitu = rcParams['insitu']
1832        if not insitu:
1833            workscan = self.copy()
1834        else:
1835            workscan = self
1836        varlist = vars()
1837        if mask is None:
1838            mask = [True for i in xrange(self.nchan(-1))]
1839
1840        from asap.asapfitter import fitter
1841        try:
1842            f = fitter()
1843            if uselin:
1844                f.set_function(lpoly=order)
1845            else:
1846                f.set_function(poly=order)
1847
1848            rows = range(workscan.nrow())
1849            if len(rows) > 0:
1850                self.blpars = []
1851
1852            for r in rows:
1853                # take into account flagtra info (CAS-1434)
1854                flagtra = workscan._getmask(r)
1855                actualmask = mask[:]
1856                if len(actualmask) == 0:
1857                    actualmask = list(flagtra[:])
1858                else:
1859                    if len(actualmask) != len(flagtra):
1860                        raise RuntimeError, "Mask and flagtra have different length"
1861                    else:
1862                        for i in range(0, len(actualmask)):
1863                            actualmask[i] = actualmask[i] and flagtra[i]
1864                f.set_scan(workscan, actualmask)
1865                f.x = workscan._getabcissa(r)
1866                f.y = workscan._getspectrum(r)
1867                f.data = None
1868                f.fit()
1869                if plot:
1870                    f.plot(residual=True)
1871                    x = raw_input("Accept fit ( [y]/n ): ")
1872                    if x.upper() == 'N':
1873                        self.blpars.append(None)
1874                        continue
1875                workscan._setspectrum(f.fitter.getresidual(), r)
1876                self.blpars.append(f.get_parameters())
1877
1878            if plot:
1879                f._p.unmap()
1880                f._p = None
1881            workscan._add_history("poly_baseline", varlist)
1882            if insitu:
1883                self._assign(workscan)
1884            else:
1885                return workscan
1886        except RuntimeError:
1887            msg = "The fit failed, possibly because it didn't converge."
1888            raise RuntimeError(msg)
1889
1890
1891    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1892                           threshold=3, chan_avg_limit=1, plot=False,
1893                           insitu=None):
1894        """\
1895        Return a scan which has been baselined (all rows) by a polynomial.
1896        Spectral lines are detected first using linefinder and masked out
1897        to avoid them affecting the baseline solution.
1898
1899        Parameters:
1900
1901            mask:       an optional mask retrieved from the scantable
1902
1903            edge:       an optional number of channels to drop at the edge
1904                        of the spectrum. If only one value is
1905                        specified, the same number will be dropped from
1906                        both sides of the spectrum. Default is to keep
1907                        all channels. Nested tuples represent individual
1908                        edge selections for different IFs (whose numbers of
1909                        spectral channels can differ)
1910
1911            order:      the order of the polynomial (default is 0)
1912
1913            threshold:  the threshold used by line finder. It is better to
1914                        keep it large as only strong lines affect the
1915                        baseline solution.
1916
1917            chan_avg_limit:
1918                        a maximum number of consecutive spectral channels to
1919                        average during the search of weak and broad lines.
1920                        The default is no averaging (and no search for weak
1921                        lines). If such lines can affect the fitted baseline
1922                        (e.g. a high order polynomial is fitted), increase this
1923                        parameter (usually values up to 8 are reasonable). Most
1924                        users of this method should find the default value
1925                        sufficient.
1926
1927            plot:       plot the fit and the residual. Each individual
1928                        fit has to be approved by typing 'y'
1929                        or 'n'
1930
1931            insitu:     if False a new scantable is returned.
1932                        Otherwise, the scaling is done in-situ
1933                        The default is taken from .asaprc (False)
1934
1935
1936        Example::
1937
1938            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
1939
1940        """
1941        if insitu is None: insitu = rcParams['insitu']
1942        varlist = vars()
1943        from asap.asapfitter import fitter
1944        from asap.asaplinefind import linefinder
1945        from asap import _is_sequence_or_number as _is_valid
1946
1947        # check whether edge is set up for each IF individually
1948        individualedge = False
1949        if len(edge) > 1:
1950            if isinstance(edge[0], (list, tuple)):
1951                individualedge = True
1952
1953        if not _is_valid(edge, int) and not individualedge:
1954            raise ValueError, "Parameter 'edge' has to be an integer or a \
1955            pair of integers specified as a tuple. Nested tuples are allowed \
1956            to make individual selection for different IFs."
1957
1958        curedge = (0, 0)
1959        if individualedge:
1960            for edgepar in edge:
1961                if not _is_valid(edgepar, int):
1962                    raise ValueError, "Each element of the 'edge' tuple has \
1963                                       to be a pair of integers or an integer."
1964        else:
1965            curedge = edge
1966
1967        # setup fitter
1968        f = fitter()
1969        f.set_function(poly=order)
1970
1971        # setup line finder
1972        fl = linefinder()
1973        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1974
1975        if not insitu:
1976            workscan = self.copy()
1977        else:
1978            workscan = self
1979
1980        fl.set_scan(workscan)
1981
1982        rows = range(workscan.nrow())
1983        # Save parameters of baseline fits & masklists as a class attribute.
1984        # NOTICE: It does not reflect changes in scantable!
1985        if len(rows) > 0:
1986            self.blpars=[]
1987            self.masklists=[]
1988        asaplog.push("Processing:")
1989        for r in rows:
1990            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1991                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1992                 workscan.getpol(r), workscan.getcycle(r))
1993            asaplog.push(msg, False)
1994
1995            # figure out edge parameter
1996            if individualedge:
1997                if len(edge) <= workscan.getif(r):
1998                    raise RuntimeError, "Number of edge elements appears to " \
1999                                        "be less than the number of IFs"
2000                curedge = edge[workscan.getif(r)]
2001
2002            # take into account flagtra info (CAS-1434)
2003            flagtra = workscan._getmask(r)
2004            actualmask = mask[:]
2005            if len(actualmask) == 0:
2006                actualmask = list(flagtra[:])
2007            else:
2008                if len(actualmask) != len(flagtra):
2009                    raise RuntimeError, "Mask and flagtra have different length"
2010                else:
2011                    for i in range(0, len(actualmask)):
2012                        actualmask[i] = actualmask[i] and flagtra[i]
2013
2014            # setup line finder
2015            fl.find_lines(r, actualmask, curedge)
2016            outmask=fl.get_mask()
2017            f.set_scan(workscan, fl.get_mask())
2018            f.x = workscan._getabcissa(r)
2019            f.y = workscan._getspectrum(r)
2020            f.data = None
2021            f.fit()
2022
2023            # Show mask list
2024            masklist=workscan.get_masklist(fl.get_mask(),row=r)
2025            msg = "mask range: "+str(masklist)
2026            asaplog.push(msg, False)
2027
2028            if plot:
2029                f.plot(residual=True)
2030                x = raw_input("Accept fit ( [y]/n ): ")
2031                if x.upper() == 'N':
2032                    self.blpars.append(None)
2033                    self.masklists.append(None)
2034                    continue
2035
2036            workscan._setspectrum(f.fitter.getresidual(), r)
2037            self.blpars.append(f.get_parameters())
2038            self.masklists.append(masklist)
2039        if plot:
2040            f._p.unmap()
2041            f._p = None
2042        workscan._add_history("auto_poly_baseline", varlist)
2043        if insitu:
2044            self._assign(workscan)
2045        else:
2046            return workscan
2047
2048    @asaplog_post_dec
2049    def rotate_linpolphase(self, angle):
2050        """\
2051        Rotate the phase of the complex polarization O=Q+iU correlation.
2052        This is always done in situ in the raw data.  So if you call this
2053        function more than once then each call rotates the phase further.
2054
2055        Parameters:
2056
2057            angle:   The angle (degrees) to rotate (add) by.
2058
2059        Example::
2060
2061            scan.rotate_linpolphase(2.3)
2062
2063        """
2064        varlist = vars()
2065        self._math._rotate_linpolphase(self, angle)
2066        self._add_history("rotate_linpolphase", varlist)
2067        return
2068
2069    @asaplog_post_dec
2070    def rotate_xyphase(self, angle):
2071        """\
2072        Rotate the phase of the XY correlation.  This is always done in situ
2073        in the data.  So if you call this function more than once
2074        then each call rotates the phase further.
2075
2076        Parameters:
2077
2078            angle:   The angle (degrees) to rotate (add) by.
2079
2080        Example::
2081
2082            scan.rotate_xyphase(2.3)
2083
2084        """
2085        varlist = vars()
2086        self._math._rotate_xyphase(self, angle)
2087        self._add_history("rotate_xyphase", varlist)
2088        return
2089
2090    @asaplog_post_dec
2091    def swap_linears(self):
2092        """\
2093        Swap the linear polarisations XX and YY, or more generally the first
2094        two polarisations, as this also works for circulars.
2095        """
2096        varlist = vars()
2097        self._math._swap_linears(self)
2098        self._add_history("swap_linears", varlist)
2099        return
2100
2101    @asaplog_post_dec
2102    def invert_phase(self):
2103        """\
2104        Invert the phase of the complex polarisation
2105        """
2106        varlist = vars()
2107        self._math._invert_phase(self)
2108        self._add_history("invert_phase", varlist)
2109        return
2110
2111    @asaplog_post_dec
2112    def add(self, offset, insitu=None):
2113        """\
2114        Return a scan where all spectra have the offset added
2115
2116        Parameters:
2117
2118            offset:      the offset
2119
2120            insitu:      if False a new scantable is returned.
2121                         Otherwise, the scaling is done in-situ
2122                         The default is taken from .asaprc (False)
2123
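        Example (the offset is arbitrary)::

            # add an offset of 0.5 to every spectrum, returning a new scantable
            shifted = scan.add(0.5, insitu=False)
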
2124        """
2125        if insitu is None: insitu = rcParams['insitu']
2126        self._math._setinsitu(insitu)
2127        varlist = vars()
2128        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2129        s._add_history("add", varlist)
2130        if insitu:
2131            self._assign(s)
2132        else:
2133            return s
2134
2135    @asaplog_post_dec
2136    def scale(self, factor, tsys=True, insitu=None):
2137        """\
2138
2139        Return a scan where all spectra are scaled by the given 'factor'
2140
2141        Parameters:
2142
2143            factor:      the scaling factor (float or 1D float list)
2144
2145            insitu:      if False a new scantable is returned.
2146                         Otherwise, the scaling is done in-situ
2147                         The default is taken from .asaprc (False)
2148
2149            tsys:        if True (default) then apply the operation to Tsys
2150                         as well as the data
2151
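        Example (the factor is a placeholder)::

            # scale the spectra by 1.3 but leave Tsys untouched
            scaled = scan.scale(1.3, tsys=False, insitu=False)
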
2152        """
2153        if insitu is None: insitu = rcParams['insitu']
2154        self._math._setinsitu(insitu)
2155        varlist = vars()
2156        s = None
2157        import numpy
2158        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2159            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2160                from asapmath import _array2dOp
2161                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2162            else:
2163                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2164        else:
2165            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2166        s._add_history("scale", varlist)
2167        if insitu:
2168            self._assign(s)
2169        else:
2170            return s
2171
2172    def set_sourcetype(self, match, matchtype="pattern",
2173                       sourcetype="reference"):
2174        """\
2175        Set the type of the source to be a source or a reference scan
2176        using the provided pattern.
2177
2178        Parameters:
2179
2180            match:          a Unix style pattern, regular expression or selector
2181
2182            matchtype:      'pattern' (default) UNIX style pattern or
2183                            'regex' regular expression
2184
2185            sourcetype:     the type of the source to use (source/reference)
2186
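        Example (the source name pattern is hypothetical)::

            # mark every scan whose source name ends in '_R' as a reference
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
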
2187        """
2188        varlist = vars()
2189        basesel = self.get_selection()
2190        stype = -1
2191        if sourcetype.lower().startswith("r"):
2192            stype = 1
2193        elif sourcetype.lower().startswith("s"):
2194            stype = 0
2195        else:
2196            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
2197        if matchtype.lower().startswith("p"):
2198            matchtype = "pattern"
2199        elif matchtype.lower().startswith("r"):
2200            matchtype = "regex"
2201        else:
2202            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2203        sel = selector()
2204        if isinstance(match, selector):
2205            sel = match
2206        else:
2207            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2208        self.set_selection(basesel+sel)
2209        self._setsourcetype(stype)
2210        self.set_selection(basesel)
2211        self._add_history("set_sourcetype", varlist)
2212
2213    @asaplog_post_dec
2214    @preserve_selection
2215    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2216        """\
2217        This function builds quotients automatically.
2218        It assumes the observation has the same number of
2219        "ons" and "offs".
2220
2221        Parameters:
2222
2223            preserve:       you can preserve (default) the continuum or
2224                            remove it.  The equations used are
2225
2226                            preserve: Output = Toff * (on/off) - Toff
2227
2228                            remove:   Output = Toff * (on/off) - Ton
2229
2230            mode:           the on/off detection mode
2231                            'paired' (default)
2232                            identifies 'off' scans by the
2233                            trailing '_R' (Mopra/Parkes) or
2234                            '_e'/'_w' (Tid) and matches
2235                            on/off pairs from the observing pattern
2236                            'time'
2237                            finds the closest off in time
2238
2239        .. todo:: verify argument is not implemented
2240
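        Example (a minimal sketch; assumes matching on/off scans are present
        in the scantable)::

            q = scan.auto_quotient(preserve=True, mode='paired')
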
2241        """
2242        varlist = vars()
2243        modes = ["time", "paired"]
2244        if not mode in modes:
2245            msg = "please provide valid mode. Valid modes are %s" % (modes)
2246            raise ValueError(msg)
2247        s = None
2248        if mode.lower() == "paired":
2249            sel = self.get_selection()
2250            sel.set_query("SRCTYPE==1")
2251            self.set_selection(sel)
2252            offs = self.copy()
2253            sel.set_query("SRCTYPE==0")
2254            self.set_selection(sel)
2255            ons = self.copy()
2256            s = scantable(self._math._quotient(ons, offs, preserve))
2257        elif mode.lower() == "time":
2258            s = scantable(self._math._auto_quotient(self, mode, preserve))
2259        s._add_history("auto_quotient", varlist)
2260        return s
2261
2262    @asaplog_post_dec
2263    def mx_quotient(self, mask = None, weight='median', preserve=True):
2264        """\
2265        Form a quotient using "off" beams when observing in "MX" mode.
2266
2267        Parameters:
2268
2269            mask:           an optional mask to be used when weight == 'stddev'
2270
2271            weight:         How to average the off beams.  Default is 'median'.
2272
2273            preserve:       you can preserve (default) the continuum or
2274                            remove it.  The equations used are:
2275
2276                                preserve: Output = Toff * (on/off) - Toff
2277
2278                                remove:   Output = Toff * (on/off) - Ton
2279
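        Example (a minimal sketch for MX-mode data)::

            q = scan.mx_quotient(weight='median', preserve=True)
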
2280        """
2281        mask = mask or ()
2282        varlist = vars()
2283        on = scantable(self._math._mx_extract(self, 'on'))
2284        preoff = scantable(self._math._mx_extract(self, 'off'))
2285        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2286        from asapmath  import quotient
2287        q = quotient(on, off, preserve)
2288        q._add_history("mx_quotient", varlist)
2289        return q
2290
2291    @asaplog_post_dec
2292    def freq_switch(self, insitu=None):
2293        """\
2294        Apply frequency switching to the data.
2295
2296        Parameters:
2297
2298            insitu:      if False a new scantable is returned.
2299                         Otherwise, the switching is done in-situ
2300                         The default is taken from .asaprc (False)
2301
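        Example (a minimal sketch; 'scan' holds frequency-switched data)::

            fs = scan.freq_switch(insitu=False)
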
2302        """
2303        if insitu is None: insitu = rcParams['insitu']
2304        self._math._setinsitu(insitu)
2305        varlist = vars()
2306        s = scantable(self._math._freqswitch(self))
2307        s._add_history("freq_switch", varlist)
2308        if insitu:
2309            self._assign(s)
2310        else:
2311            return s
2312
2313    @asaplog_post_dec
2314    def recalc_azel(self):
2315        """Recalculate the azimuth and elevation for each position."""
2316        varlist = vars()
2317        self._recalcazel()
2318        self._add_history("recalc_azel", varlist)
2319        return
2320
2321    @asaplog_post_dec
2322    def __add__(self, other):
2323        varlist = vars()
2324        s = None
2325        if isinstance(other, scantable):
2326            s = scantable(self._math._binaryop(self, other, "ADD"))
2327        elif isinstance(other, float):
2328            s = scantable(self._math._unaryop(self, other, "ADD", False))
2329        else:
2330            raise TypeError("Other input is not a scantable or float value")
2331        s._add_history("operator +", varlist)
2332        return s
2333
2334    @asaplog_post_dec
2335    def __sub__(self, other):
2336        """
2337        implicit on all axes and on Tsys
2338        """
2339        varlist = vars()
2340        s = None
2341        if isinstance(other, scantable):
2342            s = scantable(self._math._binaryop(self, other, "SUB"))
2343        elif isinstance(other, float):
2344            s = scantable(self._math._unaryop(self, other, "SUB", False))
2345        else:
2346            raise TypeError("Other input is not a scantable or float value")
2347        s._add_history("operator -", varlist)
2348        return s
2349
2350    @asaplog_post_dec
2351    def __mul__(self, other):
2352        """
2353        implicit on all axes and on Tsys
2354        """
2355        varlist = vars()
2356        s = None
2357        if isinstance(other, scantable):
2358            s = scantable(self._math._binaryop(self, other, "MUL"))
2359        elif isinstance(other, float):
2360            s = scantable(self._math._unaryop(self, other, "MUL", False))
2361        else:
2362            raise TypeError("Other input is not a scantable or float value")
2363        s._add_history("operator *", varlist)
2364        return s
2365
2366
2367    @asaplog_post_dec
2368    def __div__(self, other):
2369        """
2370        implicit on all axes and on Tsys
2371        """
2372        varlist = vars()
2373        s = None
2374        if isinstance(other, scantable):
2375            s = scantable(self._math._binaryop(self, other, "DIV"))
2376        elif isinstance(other, float):
2377            if other == 0.0:
2378                raise ZeroDivisionError("Dividing by zero is not recommended")
2379            s = scantable(self._math._unaryop(self, other, "DIV", False))
2380        else:
2381            raise TypeError("Other input is not a scantable or float value")
2382        s._add_history("operator /", varlist)
2383        return s
2384
2385    @asaplog_post_dec
2386    def get_fit(self, row=0):
2387        """\
2388        Print or return the stored fits for a row in the scantable
2389
2390        Parameters:
2391
2392            row:    the row which the fit has been applied to.
2393
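        Example (assumes a fit has previously been stored for this row)::

            pars = scan.get_fit(0)
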
2394        """
2395        if row >= self.nrow():
2396            return
2397        from asap.asapfit import asapfit
2398        fit = asapfit(self._getfit(row))
2399        asaplog.push( '%s' %(fit) )
2400        return fit.as_dict()
2401
2402    def flag_nans(self):
2403        """\
2404        Utility function to flag NaN values in the scantable.
2405        """
2406        import numpy
2407        basesel = self.get_selection()
2408        for i in range(self.nrow()):
2409            sel = self.get_row_selector(i)
2410            self.set_selection(basesel+sel)
2411            nans = numpy.isnan(self._getspectrum(0))
2412            if numpy.any(nans):
2413                bnans = [ bool(v) for v in nans]
2414                self.flag(bnans)
2415        self.set_selection(basesel)
2416
2417    def get_row_selector(self, rowno):
2418        return selector(beams=self.getbeam(rowno),
2419                        ifs=self.getif(rowno),
2420                        pols=self.getpol(rowno),
2421                        scans=self.getscan(rowno),
2422                        cycles=self.getcycle(rowno))
2423
2424    def _add_history(self, funcname, parameters):
2425        if not rcParams['scantable.history']:
2426            return
2427        # create date
2428        sep = "##"
2429        from datetime import datetime
2430        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2431        hist = dstr+sep
2432        hist += funcname+sep#cdate+sep
2433        if parameters.has_key('self'): del parameters['self']
2434        for k, v in parameters.iteritems():
2435            if type(v) is dict:
2436                for k2, v2 in v.iteritems():
2437                    hist += k2
2438                    hist += "="
2439                    if isinstance(v2, scantable):
2440                        hist += 'scantable'
2441                    elif k2 == 'mask':
2442                        if isinstance(v2, list) or isinstance(v2, tuple):
2443                            hist += str(self._zip_mask(v2))
2444                        else:
2445                            hist += str(v2)
2446                    else:
2447                        hist += str(v2)
2448            else:
2449                hist += k
2450                hist += "="
2451                if isinstance(v, scantable):
2452                    hist += 'scantable'
2453                elif k == 'mask':
2454                    if isinstance(v, list) or isinstance(v, tuple):
2455                        hist += str(self._zip_mask(v))
2456                    else:
2457                        hist += str(v)
2458                else:
2459                    hist += str(v)
2460            hist += sep
2461        hist = hist[:-2] # remove trailing '##'
2462        self._addhistory(hist)
2463
2464
2465    def _zip_mask(self, mask):
2466        mask = list(mask)
2467        i = 0
2468        segments = []
2469        while mask[i:].count(1):
2470            i += mask[i:].index(1)
2471            if mask[i:].count(0):
2472                j = i + mask[i:].index(0)
2473            else:
2474                j = len(mask)
2475            segments.append([i, j])
2476            i = j
2477        return segments
2478
2479    def _get_ordinate_label(self):
2480        fu = "("+self.get_fluxunit()+")"
2481        import re
2482        lbl = "Intensity"
2483        if re.match(".K.", fu):
2484            lbl = "Brightness Temperature "+ fu
2485        elif re.match(".Jy.", fu):
2486            lbl = "Flux density "+ fu
2487        return lbl
2488
2489    def _check_ifs(self):
2490        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2491        nchans = filter(lambda t: t > 0, nchans)
2492        return (sum(nchans)/len(nchans) == nchans[0])
2493
2494    @asaplog_post_dec
2495    def _fill(self, names, unit, average, getpt, antenna):
2496        first = True
2497        fullnames = []
2498        for name in names:
2499            name = os.path.expandvars(name)
2500            name = os.path.expanduser(name)
2501            if not os.path.exists(name):
2502                msg = "File '%s' does not exist" % (name)
2503                raise IOError(msg)
2504            fullnames.append(name)
2505        if average:
2506            asaplog.push('Auto averaging integrations')
2507        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2508        for name in fullnames:
2509            tbl = Scantable(stype)
2510            r = filler(tbl)
2511            rx = rcParams['scantable.reference']
2512            r.setreferenceexpr(rx)
2513            msg = "Importing %s..." % (name)
2514            asaplog.push(msg, False)
2515            r.open(name)# antenna, -1, -1, getpt)
2516            r.fill()
2517            if average:
2518                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2519            if not first:
2520                tbl = self._math._merge([self, tbl])
2521            Scantable.__init__(self, tbl)
2522            r.close()
2523            del r, tbl
2524            first = False
2525            #flush log
2526        asaplog.post()
2527        if unit is not None:
2528            self.set_fluxunit(unit)
2529        if not is_casapy():
2530            self.set_freqframe(rcParams['scantable.freqframe'])
2531
2532    def __getitem__(self, key):
2533        if key < 0:
2534            key += self.nrow()
2535        if key >= self.nrow():
2536            raise IndexError("Row index out of range.")
2537        return self._getspectrum(key)
2538
2539    def __setitem__(self, key, value):
2540        if key < 0:
2541            key += self.nrow()
2542        if key >= self.nrow():
2543            raise IndexError("Row index out of range.")
2544        if not hasattr(value, "__len__") or \
2545                len(value) > self.nchan(self.getif(key)):
2546            raise ValueError("Spectrum length doesn't match.")
2547        return self._setspectrum(value, key)
2548
2549    def __len__(self):
2550        return self.nrow()
2551
2552    def __iter__(self):
2553        for i in range(len(self)):
2554            yield self[i]