source: trunk/python/scantable.py @ 1697

Last change on this file since 1697 was 1697, checked in by Malte Marquarding, 14 years ago

Some minor tidy up and documentation

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.6 KB
1import os
2try:
3    from functools import wraps as wraps_dec
4except ImportError:
5    from asap.compatibility import wraps as wraps_dec
6
7from asap._asap import Scantable
8from asap import rcParams
9from asap import print_log, print_log_dec
10from asap import asaplog
11from asap import selector
12from asap import linecatalog
13from asap.coordinate import coordinate
14from asap import _n_bools, mask_not, mask_and, mask_or
15
16
17def preserve_selection(func):
18    @wraps_dec(func)
19    def wrap(obj, *args, **kw):
20        basesel = obj.get_selection()
21        val = func(obj, *args, **kw)
22        obj.set_selection(basesel)
23        return val
24    return wrap
25
26
27def is_scantable(filename):
28    return (os.path.isdir(filename)
29            and not os.path.exists(filename+'/table.f1')
30            and os.path.exists(filename+'/table.info'))
31
32
33class scantable(Scantable):
34    """
35        The ASAP container for scans
36    """
37    @print_log_dec
38    def __init__(self, filename, average=None, unit=None, parallactify=None):
39        """
40        Create a scantable from a saved one or make a reference
41        Parameters:
42            filename:    the name of an asap table on disk
43                         or
44                         the name of a rpfits/sdfits/ms file
45                         (integrations within scans are auto averaged
46                         and the whole file is read)
47                         or
48                         [advanced] a reference to an existing
49                         scantable
50            average:     average all integrations within a scan on read.
51                         The default (True) is taken from .asaprc.
52            unit:         brightness unit; must be consistent with K or Jy.
53                         Over-rides the default selected by the reader
54                         (input rpfits/sdfits/ms) or replaces the value
55                         in existing scantables
56            parallactify: Indicate that the data had been parallactified.
57                          Default is taken from the rc file.
58        """
59        if average is None:
60            average = rcParams['scantable.autoaverage']
61        parallactify = parallactify or rcParams['scantable.parallactify']
62        varlist = vars()
63        from asap._asap import stmath
64        self._math = stmath()
65        if isinstance(filename, Scantable):
66            Scantable.__init__(self, filename)
67        else:
68            if isinstance(filename, str):
69                filename = os.path.expandvars(filename)
70                filename = os.path.expanduser(filename)
71                if not os.path.exists(filename):
72                    s = "File '%s' not found." % (filename)
73                    if rcParams['verbose']:
74                        asaplog.push(s)
75                        return
76                    raise IOError(s)
77                if is_scantable(filename):
78                    ondisk = rcParams['scantable.storage'] == 'disk'
79                    Scantable.__init__(self, filename, ondisk)
80                    if unit is not None:
81                        self.set_fluxunit(unit)
82                    self.set_freqframe(rcParams['scantable.freqframe'])
83                else:
84                    self._fill([filename], unit, average)
85            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
86                  and isinstance(filename[-1], str):
87                self._fill(filename, unit, average)
88        self.parallactify(parallactify)
89        self._add_history("scantable", varlist)
90
91    @print_log_dec
92    def save(self, name=None, format=None, overwrite=False):
93        """
94        Store the scantable on disk. This can be an asap (aips++) Table,
95        SDFITS or MS2 format.
96        Parameters:
97            name:        the name of the output file. For format "ASCII"
98                         this is the root file name (data in 'name'.txt
99                         and header in 'name'_header.txt)
100            format:      an optional file format. Default is ASAP.
101                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
102                                       'SDFITS' (save as SDFITS file)
103                                       'ASCII' (saves as ascii text file)
104                                       'MS2' (saves as an aips++
105                                              MeasurementSet V2)
106                                       'FITS' (save as image FITS - not
107                                               readable by class)
108                                       'CLASS' (save as FITS readable by CLASS)
109            overwrite:   If the file should be overwritten if it exists.
110                         The default False is to return with warning
111                         without writing the output. USE WITH CARE.
112        Example:
113            scan.save('myscan.asap')
114            scan.save('myscan.sdfits', 'SDFITS')
115        """
116        from os import path
117        format = format or rcParams['scantable.save']
118        suffix = '.'+format.lower()
119        if name is None or name == "":
120            name = 'scantable'+suffix
121            msg = "No filename given. Using default name %s..." % name
122            asaplog.push(msg)
123        name = path.expandvars(name)
124        if path.isfile(name) or path.isdir(name):
125            if not overwrite:
126                msg = "File %s exists." % name
127                if rcParams['verbose']:
128                    print msg
129                    return
130                else:
131                    raise IOError(msg)
132        format2 = format.upper()
133        if format2 == 'ASAP':
134            self._save(name)
135        else:
136            from asap._asap import stwriter as stw
137            writer = stw(format2)
138            writer.write(self, name)
139        return
140
141    def copy(self):
142        """
143        Return a copy of this scantable.
144        Note:
145            This makes a full (deep) copy. scan2 = scan1 makes a reference.
146        Parameters:
147            none
148        Example:
149            copiedscan = scan.copy()
150        """
151        sd = scantable(Scantable._copy(self))
152        return sd
153
154    def drop_scan(self, scanid=None):
155        """
156        Return a new scantable where the specified scan number(s) have
157        been dropped.
158        Parameters:
159            scanid:    a (list of) scan number(s)
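        Example (usage sketch; assumes 'scan' is an existing scantable):
            # drop scan number 0, returning a new scantable
            newscan = scan.drop_scan(0)
            # drop several scans at once
            newscan = scan.drop_scan([2, 3])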
160        """
161        from asap import _is_sequence_or_number as _is_valid
162        from asap import _to_list
163        from asap import unique
164        if not _is_valid(scanid):
165            if rcParams['verbose']:
166                print "Please specify a scanno to drop from the scantable"
167                return
168            else:
169                raise RuntimeError("No scan given")
170        try:
171            scanid = _to_list(scanid)
172            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
173            for sid in scanid: allscans.remove(sid)
174            if len(allscans) == 0:
175                raise ValueError("Can't remove all scans")
176        except ValueError:
177            if rcParams['verbose']:
178                print "Couldn't find any match."
179                return
180            else: raise
181        try:
182            sel = selector(scans=allscans)
183            return self._select_copy(sel)
184        except RuntimeError:
185            if rcParams['verbose']:
186                print "Couldn't find any match."
187            else:
188                raise
189
190    def _select_copy(self, selection):
191        orig = self.get_selection()
192        self.set_selection(orig+selection)
193        cp = self.copy()
194        self.set_selection(orig)
195        return cp
196
197    def get_scan(self, scanid=None):
198        """
199        Return a specific scan (by scanno) or collection of scans (by
200        source name) in a new scantable.
201        Note:
202            See scantable.drop_scan() for the inverse operation.
203        Parameters:
204            scanid:    a (list of) scanno or a source name, unix-style
205                       patterns are accepted for source name matching, e.g.
206                       '*_R' gets all 'ref' scans
207        Example:
208            # get all scans containing the source '323p459'
209            newscan = scan.get_scan('323p459')
210            # get all 'off' scans
211            refscans = scan.get_scan('*_R')
212            # get a subset of scans by scanno (as listed in scan.summary())
213            newscan = scan.get_scan([0, 2, 7, 10])
214        """
215        if scanid is None:
216            if rcParams['verbose']:
217                print "Please specify a scan no or name to " \
218                      "retrieve from the scantable"
219                return
220            else:
221                raise RuntimeError("No scan given")
222
223        try:
224            bsel = self.get_selection()
225            sel = selector()
226            if type(scanid) is str:
227                sel.set_name(scanid)
228                return self._select_copy(sel)
229            elif type(scanid) is int:
230                sel.set_scans([scanid])
231                return self._select_copy(sel)
232            elif type(scanid) is list:
233                sel.set_scans(scanid)
234                return self._select_copy(sel)
235            else:
236                msg = "Illegal scanid type, use 'int' or 'list' if ints."
237                if rcParams['verbose']:
238                    print msg
239                else:
240                    raise TypeError(msg)
241        except RuntimeError:
242            if rcParams['verbose']: print "Couldn't find any match."
243            else: raise
244
245    def __str__(self):
246        return Scantable._summary(self, True)
247
248    def summary(self, filename=None):
249        """
250        Print a summary of the contents of this scantable.
251        Parameters:
252            filename:    the name of a file to write the output to
253                         Default - no file output
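        Example (usage sketch; 'summary.txt' is an arbitrary output file):
            scan.summary()                   # page the summary to the screen
            scan.summary('summary.txt')      # also write it to a file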
254        """
255        info = Scantable._summary(self, True)
256        if filename is not None:
257            if filename == "":
258                filename = 'scantable_summary.txt'
259            from os.path import expandvars, isdir
260            filename = expandvars(filename)
261            if not isdir(filename):
262                data = open(filename, 'w')
263                data.write(info)
264                data.close()
265            else:
266                msg = "Illegal file name '%s'." % (filename)
267                if rcParams['verbose']:
268                    print msg
269                else:
270                    raise IOError(msg)
271        if rcParams['verbose']:
272            try:
273                from IPython.genutils import page as pager
274            except ImportError:
275                from pydoc import pager
276            pager(info)
277        else:
278            return info
279
280    def get_spectrum(self, rowno):
281        """Return the spectrum for the current row in the scantable as a list.
282        Parameters:
283             rowno:   the row number to retrieve the spectrum from
284        """
285        return self._getspectrum(rowno)
286
287    def get_mask(self, rowno):
288        """Return the mask for the current row in the scantable as a list.
289        Parameters:
290             rowno:   the row number to retrieve the mask from
291        """
292        return self._getmask(rowno)
293
294    def set_spectrum(self, spec, rowno):
295        """Return the spectrum for the current row in the scantable as a list.
296        Parameters:
297             spec:   the spectrum
298             rowno:    the row number to set the spectrum for
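        Example (usage sketch; assumes 'scan' is an existing scantable):
            spec = scan.get_spectrum(0)
            spec = [2.0*v for v in spec]   # scale the spectrum
            scan.set_spectrum(spec, 0)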
299        """
300        assert(len(spec) == self.nchan())
301        return self._setspectrum(spec, rowno)
302
303    def get_coordinate(self, rowno):
304        """Return the (spectral) coordinate for a a given 'rowno'.
305        NOTE:
306            * This coordinate is only valid until a scantable method modifies
307              the frequency axis.
308            * This coordinate contains the original frequency set-up,
309              NOT the new frame. The conversions, however, are done using the
310              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
311              use scantable.freq_align first. Without it there is no closure,
312              i.e.
313              c = myscan.get_coordinate(0)
314              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
315
316        Parameters:
317             rowno:    the row number for the spectral coordinate
318
319        """
320        return coordinate(Scantable.get_coordinate(self, rowno))
321
322    def get_selection(self):
323        """
324        Get the selection object currently set on this scantable.
325        Parameters:
326            none
327        Example:
328            sel = scan.get_selection()
329            sel.set_ifs(0)              # select IF 0
330            scan.set_selection(sel)     # apply modified selection
331        """
332        return selector(self._getselection())
333
334    def set_selection(self, selection=None, **kw):
335        """
336        Select a subset of the data. All following operations on this scantable
337        are only applied to this selection.
338        Parameters:
339            selection:    a selector object (default unset the selection),
340
341            or
342
343            any combination of
344            "pols", "ifs", "beams", "scans", "cycles", "name", "query"
345
346        Examples:
347            sel = selector()         # create a selection object
348            sel.set_scans([0, 3])     # select SCANNO 0 and 3
349            scan.set_selection(sel)  # set the selection
350            scan.summary()           # will only print summary of scanno 0 and 3
351            scan.set_selection()     # unset the selection
352            # or the equivalent
353            scan.set_selection(scans=[0,3])
354            scan.summary()           # will only print summary of scanno 0 and 3
355            scan.set_selection()     # unset the selection
356        """
357        if selection is None:
358            # reset
359            if len(kw) == 0:
360                selection = selector()
361            else:
362                # try keywords
363                for k in kw:
364                    if k not in selector.fields:
365                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
366                selection = selector(**kw)
367        self._setselection(selection)
368
369    def stats(self, stat='stddev', mask=None):
370        """
371        Determine the specified statistic of the current beam/if/pol
372        Takes a 'mask' as an optional parameter to specify which
373        channels should be excluded.
374        Parameters:
375            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
376                     'var', 'stddev', 'avdev', 'rms', 'median'
377            mask:    an optional mask specifying where the statistic
378                     should be determined.
379        Example:
380            scan.set_unit('channel')
381            msk = scan.create_mask([100, 200], [500, 600])
382            scan.stats(stat='mean', mask=msk)
383        """
384        mask = mask or []
385        if not self._check_ifs():
386            raise ValueError("Cannot apply mask as the IFs have different "
387                             "number of channels. Please use setselection() "
388                             "to select individual IFs")
389
390        statvals = self._math._stats(self, mask, stat)
391        def cb(i):
392            return statvals[i]
393
394        return self._row_callback(cb, stat)
395
396    def stddev(self, mask=None):
397        """
398        Determine the standard deviation of the current beam/if/pol
399        Takes a 'mask' as an optional parameter to specify which
400        channels should be excluded.
401        Parameters:
402            mask:    an optional mask specifying where the standard
403                     deviation should be determined.
404
405        Example:
406            scan.set_unit('channel')
407            msk = scan.create_mask([100, 200], [500, 600])
408            scan.stddev(mask=msk)
409        """
410        return self.stats(stat='stddev', mask=mask)
411
412
413    def get_column_names(self):
414        """
415        Return a list of column names, which can be used for selection.
416        """
417        return list(Scantable.get_column_names(self))
418
419    def get_tsys(self):
420        """
421        Return the System temperatures.
422        Returns:
423            a list of Tsys values for the current selection
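        Example (usage sketch; assumes 'scan' is an existing scantable):
            tsys = scan.get_tsys()    # one value per row of the current selection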
424        """
425
426        return self._row_callback(self._gettsys, "Tsys")
427
428    def _row_callback(self, callback, label):
429        out = ""
430        outvec = []
431        sep = '-'*50
432        for i in range(self.nrow()):
433            tm = self._gettime(i)
434            src = self._getsourcename(i)
435            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
436            out += 'Time[%s]:\n' % (tm)
437            if self.nbeam(-1) > 1:
438                out +=  ' Beam[%d] ' % (self.getbeam(i))
439            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
440            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
441            outvec.append(callback(i))
442            out += '= %3.3f\n' % (outvec[i])
443            out +=  sep+'\n'
444        if rcParams['verbose']:
445            print sep
446            print " %s" % (label)
447            print sep
448            print out
449        return outvec
450
451    def _get_column(self, callback, row=-1):
452        """
453        """
454        if row == -1:
455            return [callback(i) for i in range(self.nrow())]
456        else:
457            if  0 <= row < self.nrow():
458                return callback(row)
459
460
461    def get_time(self, row=-1, asdatetime=False):
462        """
463        Get a list of time stamps for the observations.
464        Return a string, or a datetime object if asdatetime=True, for each time stamp.
465        Parameters:
466            row:          row no of integration. Default -1 return all rows
467            asdatetime:   return values as datetime objects rather than strings
468        Example:
469            none
470        """
471        from time import strptime
472        from datetime import datetime
473        times = self._get_column(self._gettime, row)
474        if not asdatetime:
475            return times
476        format = "%Y/%m/%d/%H:%M:%S"
477        if isinstance(times, list):
478            return [datetime(*strptime(i, format)[:6]) for i in times]
479        else:
480            return datetime(*strptime(times, format)[:6])
481
482
483    def get_inttime(self, row=-1):
484        """
485        Get a list of integration times for the observations.
486        Return a time in seconds for each integration in the scantable.
487        Parameters:
488            row:    row no of integration. Default -1 return all rows.
489        Example:
490            none
491        """
492        return self._get_column(self._getinttime, row)
493
494
495    def get_sourcename(self, row=-1):
496        """
497        Get a list of source names for the observations.
498        Return a string for each integration in the scantable.
499        Parameters:
500            row:    row no of integration. Default -1 return all rows.
501        Example:
502            none
503        """
504        return self._get_column(self._getsourcename, row)
505
506    def get_elevation(self, row=-1):
507        """
508        Get a list of elevations for the observations.
509        Return a float for each integration in the scantable.
510        Parameters:
511            row:    row no of integration. Default -1 return all rows.
512        Example:
513            none
514        """
515        return self._get_column(self._getelevation, row)
516
517    def get_azimuth(self, row=-1):
518        """
519        Get a list of azimuths for the observations.
520        Return a float for each integration in the scantable.
521        Parameters:
522            row:    row no of integration. Default -1 return all rows.
523        Example:
524            none
525        """
526        return self._get_column(self._getazimuth, row)
527
528    def get_parangle(self, row=-1):
529        """
530        Get a list of parallactic angles for the observations.
531        Return a float for each integration in the scantable.
532        Parameters:
533            row:    row no of integration. Default -1 return all rows.
534        Example:
535            none
536        """
537        return self._get_column(self._getparangle, row)
538
539    def get_direction(self, row=-1):
540        """
541        Get a list of Positions on the sky (direction) for the observations.
542        Return a string for each integration in the scantable.
543        Parameters:
544            row:    row no of integration. Default -1 return all rows
545        Example:
546            none
547        """
548        return self._get_column(self._getdirection, row)
549
550    def get_directionval(self, row=-1):
551        """
552        Get a list of Positions on the sky (direction) for the observations.
553        Return a float for each integration in the scantable.
554        Parameters:
555            row:    row no of integration. Default -1 return all rows
556        Example:
557            none
558        """
559        return self._get_column(self._getdirectionvec, row)
560
561
562    def set_unit(self, unit='channel'):
563        """
564        Set the unit for all following operations on this scantable
565        Parameters:
566            unit:    optional unit, default is 'channel'
567                     one of '*Hz', 'km/s', 'channel', ''
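        Example (usage sketch; assumes 'scan' is an existing scantable):
            scan.set_unit('GHz')     # work in frequency (GHz) from now on
            scan.set_unit('km/s')    # or in radial velocity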
568        """
569        varlist = vars()
570        if unit in ['', 'pixel', 'channel']:
571            unit = ''
572        inf = list(self._getcoordinfo())
573        inf[0] = unit
574        self._setcoordinfo(inf)
575        self._add_history("set_unit", varlist)
576
577    @print_log_dec
578    def set_instrument(self, instr):
579        """
580        Set the instrument for subsequent processing.
581        Parameters:
582            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
583                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
584        """
585        self._setInstrument(instr)
586        self._add_history("set_instument", vars())
587
588    @print_log_dec
589    def set_feedtype(self, feedtype):
590        """
591        Overwrite the feed type, which might not be set correctly.
592        Parameters:
593            feedtype:     'linear' or 'circular'
594        """
595        self._setfeedtype(feedtype)
596        self._add_history("set_feedtype", vars())
597
598    @print_log_dec
599    def set_doppler(self, doppler='RADIO'):
600        """
601        Set the doppler for all following operations on this scantable.
602        Parameters:
603            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
604        """
605        varlist = vars()
606        inf = list(self._getcoordinfo())
607        inf[2] = doppler
608        self._setcoordinfo(inf)
609        self._add_history("set_doppler", vars())
610
611    @print_log_dec
612    def set_freqframe(self, frame=None):
613        """
614        Set the frame type of the Spectral Axis.
615        Parameters:
616            frame:   an optional frame type, default 'LSRK'. Valid frames are:
617                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
618                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
619        Examples:
620            scan.set_freqframe('BARY')
621        """
622        frame = frame or rcParams['scantable.freqframe']
623        varlist = vars()
624        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
625                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
626
627        if frame in valid:
628            inf = list(self._getcoordinfo())
629            inf[1] = frame
630            self._setcoordinfo(inf)
631            self._add_history("set_freqframe", varlist)
632        else:
633            msg  = "Please specify a valid freq type. Valid types are:\n", valid
634            if rcParams['verbose']:
635                print msg
636            else:
637                raise TypeError(msg)
638
639    def set_dirframe(self, frame=""):
640        """
641        Set the frame type of the Direction on the sky.
642        Parameters:
643            frame:   an optional frame type, default ''. Valid frames are:
644                     'J2000', 'B1950', 'GALACTIC'
645        Examples:
646            scan.set_dirframe('GALACTIC')
647        """
648        varlist = vars()
649        try:
650            Scantable.set_dirframe(self, frame)
651        except RuntimeError, msg:
652            if rcParams['verbose']:
653                print msg
654            else:
655                raise
656        self._add_history("set_dirframe", varlist)
657
658    def get_unit(self):
659        """
660        Get the default unit set in this scantable
661        Returns:
662            A unit string
663        """
664        inf = self._getcoordinfo()
665        unit = inf[0]
666        if unit == '': unit = 'channel'
667        return unit
668
669    def get_abcissa(self, rowno=0):
670        """
671        Get the abcissa in the current coordinate setup for the currently
672        selected Beam/IF/Pol
673        Parameters:
674            rowno:    an optional row number in the scantable. Default is the
675                      first row, i.e. rowno=0
676        Returns:
677            The abcissa values and the format string (as a tuple)
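        Example (usage sketch; assumes 'scan' is an existing scantable):
            abc, lbl = scan.get_abcissa(rowno=0)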
678        """
679        abc = self._getabcissa(rowno)
680        lbl = self._getabcissalabel(rowno)
681        return abc, lbl
682
683    def flag(self, mask=None):
684        """
685        Flag the selected data using an optional channel mask.
686        Parameters:
687            mask:   an optional channel mask, created with create_mask. Default
688                    (no mask) is all channels.
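        Example (usage sketch; assumes 'scan' is an existing scantable):
            # flag the first 100 channels (in the current unit)
            msk = scan.create_mask([0, 99])
            scan.flag(mask=msk)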
689        """
690        varlist = vars()
691        mask = mask or []
692        try:
693            self._flag(mask)
694        except RuntimeError, msg:
695            if rcParams['verbose']:
696                print msg
697                return
698            else: raise
699        self._add_history("flag", varlist)
700
701    @print_log_dec
702    def lag_flag(self, start, end, unit="MHz", insitu=None):
703        """
704        Flag the data in 'lag' space by providing a frequency to remove.
705        Flagged data in the scantable gets interpolated over the region.
706        No taper is applied.
707        Parameters:
708            start:    the start frequency (really a period within the
709                      bandwidth)  or period to remove
710            end:      the end frequency or period to remove
711            unit:     the frequency unit (default "MHz") or "" for
712                      explicit lag channels
713        Notes:
714            It is recommended to flag edges of the band or strong
715            signals beforehand.
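        Example (usage sketch; the frequency range is illustrative only):
            # interpolate over a standing-wave ripple between 5 and 7 MHz
            cleaned = scan.lag_flag(5, 7, unit="MHz", insitu=False)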
716        """
717        if insitu is None: insitu = rcParams['insitu']
718        self._math._setinsitu(insitu)
719        varlist = vars()
720        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
721        if not (unit == "" or base.has_key(unit)):
722            raise ValueError("%s is not a valid unit." % unit)
723        try:
724            if unit == "":
725                s = scantable(self._math._lag_flag(self, start, end, "lags"))
726            else:
727                s = scantable(self._math._lag_flag(self, start*base[unit],
728                                                   end*base[unit], "frequency"))
729        except RuntimeError, msg:
730            if rcParams['verbose']:
731                print msg
732                return
733            else: raise
734        s._add_history("lag_flag", varlist)
735        if insitu:
736            self._assign(s)
737        else:
738            return s
739
740    @print_log_dec
741    def create_mask(self, *args, **kwargs):
742        """
743        Compute and return a mask based on [min, max] windows.
744        The specified windows are to be INCLUDED, when the mask is
745        applied.
746        Parameters:
747            [min, max], [min2, max2], ...
748                Pairs of start/end points (inclusive) specifying the regions
749                to be masked
750            invert:     optional argument. If specified as True,
751                        return an inverted mask, i.e. the regions
752                        specified are EXCLUDED
753            row:        create the mask using the specified row for
754                        unit conversions, default is row=0
755                        only necessary if frequency varies over rows.
756        Example:
757            scan.set_unit('channel')
758            a)
759            msk = scan.create_mask([400, 500], [800, 900])
760            # masks everything outside 400 and 500
761            # and 800 and 900 in the unit 'channel'
762
763            b)
764            msk = scan.create_mask([400, 500], [800, 900], invert=True)
765            # masks the regions between 400 and 500
766            # and 800 and 900 in the unit 'channel'
767            c)
768            # mask only channel 400
769            msk = scan.create_mask([400])
770        """
771        row = kwargs.get("row", 0)
772        data = self._getabcissa(row)
773        u = self._getcoordinfo()[0]
774        if rcParams['verbose']:
775            if u == "": u = "channel"
776            msg = "The current mask window unit is %s" % u
777            i = self._check_ifs()
778            if not i:
779                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
780            asaplog.push(msg)
781        n = self.nchan()
782        msk = _n_bools(n, False)
783        # test if args is a 'list' or a 'normal *args - UGLY!!!
784
785        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
786             and args or args[0]
787        for window in ws:
788            if len(window) == 1:
789                window = [window[0], window[0]]
790            if len(window) == 0 or  len(window) > 2:
791                raise ValueError("A window needs to be defined as [start(, end)]")
792            if window[0] > window[1]:
793                tmp = window[0]
794                window[0] = window[1]
795                window[1] = tmp
796            for i in range(n):
797                if data[i] >= window[0] and data[i] <= window[1]:
798                    msk[i] = True
799        if kwargs.has_key('invert'):
800            if kwargs.get('invert'):
801                msk = mask_not(msk)
802        return msk
803
804    def get_restfreqs(self):
805        """
806        Get the restfrequency(s) stored in this scantable.
807        The return value(s) are always of unit 'Hz'
808        Parameters:
809            none
810        Returns:
811            a list of doubles
812        """
813        return list(self._getrestfreqs())
814
815
816    def set_restfreqs(self, freqs=None, unit='Hz'):
817        """
818        Set or replace the restfrequency(s) for the selected data.
819        If the 'freqs' argument holds a scalar,
820        then that rest frequency will be applied to all the selected
821        data.  If the 'freqs' argument holds
822        a vector, then it MUST be of equal or smaller length than
823        the number of IFs (and the available restfrequencies will be
824        replaced by this vector).  In this case, *all* data have
825        the restfrequency set per IF according
826        to the corresponding value you give in the 'freqs' vector.
827        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
828        IF 1 gets restfreq 2e9.
829        You can also specify the frequencies via a linecatalog.
830
831        Parameters:
832            freqs:   list of rest frequency values or string identifiers
833            unit:    unit for rest frequency (default 'Hz')
834
835        Example:
836            # set the given restfrequency for the whole table
837            scan.set_restfreqs(freqs=1.4e9)
838            # If the number of IFs in the data is >= 2, IF0 gets the first
839            # value IF1 the second...
840            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
841            #set the given restfrequency for the whole table (by name)
842            scan.set_restfreqs(freqs="OH1667")
843
844        Note:
845            To do more sophisticated rest frequency setting, e.g. on a
846            source and IF basis, use scantable.set_selection() before using
847            this function.
848            # provided your scantable is called scan
849            selection = selector()
850            selection.set_name("ORION*")
851            selection.set_ifs([1])
852            scan.set_selection(selection)
853            scan.set_restfreqs(freqs=86.6e9)
854
855        """
856        varlist = vars()
857        from asap import linecatalog
858        # simple  value
859        if isinstance(freqs, int) or isinstance(freqs, float):
860            self._setrestfreqs(freqs, "",unit)
861        # list of values
862        elif isinstance(freqs, list) or isinstance(freqs, tuple):
863            # list values are scalars
864            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
865                sel = selector()
866                savesel = self._getselection()
867                iflist = self.getifnos()
868                for i in xrange(len(freqs)):
869                    sel.set_ifs(iflist[i])
870                    self._setselection(sel)
871                    self._setrestfreqs(freqs[i], "",unit)
872                self._setselection(savesel)
873            # list values are dicts with 'value' and 'name' keys
874            elif isinstance(freqs[-1], dict):
875                sel = selector()
876                savesel = self._getselection()
877                iflist = self.getifnos()
878                for i in xrange(len(freqs)):
879                    sel.set_ifs(iflist[i])
880                    self._setselection(sel)
881                    self._setrestfreqs(freqs[i]["value"],
882                                       freqs[i]["name"], "MHz")
883                self._setselection(savesel)
884        # freqs are to be taken from a linecatalog
885        elif isinstance(freqs, linecatalog):
886            sel = selector()
887            savesel = self._getselection()
888            iflist = self.getifnos()
889            for i in xrange(freqs.nrow()):
890                sel.set_ifs(iflist[i])
891                self._setselection(sel)
892                self._setrestfreqs(freqs.get_frequency(i),
893                                   freqs.get_name(i), "MHz")
894                # ensure that we are not iterating past nIF
895                if i == self.nif()-1: break
896            self._setselection(savesel)
897        else:
898            return
899        self._add_history("set_restfreqs", varlist)
900
901    def shift_refpix(self, delta):
902        """
903        Shift the reference pixel of the Spectral Coordinate by an
904        integer amount.
905        Parameters:
906            delta:   the amount to shift by
907        Note:
908            Be careful using this with broadband data.
909        """
910        Scantable.shift(self, delta)
911
912    def history(self, filename=None):
913        """
914        Print the history. Optionally to a file.
915        Parameters:
916            filename:    The name of the file to save the history to.
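        Example (usage sketch; assumes 'scan' is an existing scantable):
            scan.history()                      # page the history to the screen
            scan.history('scan_history.txt')    # or write it to a file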
917        """
918        hist = list(self._gethistory())
919        out = "-"*80
920        for h in hist:
921            if h.startswith("---"):
922                out += "\n"+h
923            else:
924                items = h.split("##")
925                date = items[0]
926                func = items[1]
927                items = items[2:]
928                out += "\n"+date+"\n"
929                out += "Function: %s\n  Parameters:" % (func)
930                for i in items:
931                    s = i.split("=")
932                    out += "\n   %s = %s" % (s[0], s[1])
933                out += "\n"+"-"*80
934        if filename is not None:
935            if filename == "":
936                filename = 'scantable_history.txt'
937            import os
938            filename = os.path.expandvars(os.path.expanduser(filename))
939            if not os.path.isdir(filename):
940                data = open(filename, 'w')
941                data.write(out)
942                data.close()
943            else:
944                msg = "Illegal file name '%s'." % (filename)
945                if rcParams['verbose']:
946                    print msg
947                else:
948                    raise IOError(msg)
949        if rcParams['verbose']:
950            try:
951                from IPython.genutils import page as pager
952            except ImportError:
953                from pydoc import pager
954            pager(out)
955        else:
956            return out
957        return
958    #
959    # Maths business
960    #
961    @print_log_dec
962    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
963        """
964        Return the (time) weighted average of a scan.
965        Note:
966            in channels only - align if necessary
967        Parameters:
968            mask:     an optional mask (only used for 'var' and 'tsys'
969                      weighting)
970            scanav:   True averages each scan separately
971                      False (default) averages all scans together,
972            weight:   Weighting scheme.
973                      'none'     (mean, no weighting)
974                      'var'      (1/var(spec) weighted)
975                      'tsys'     (1/Tsys**2 weighted)
976                      'tint'     (integration time weighted)
977                      'tintsys'  (Tint/Tsys**2)
978                      'median'   (median averaging)
979                      The default is 'tint'
980            align:    align the spectra in velocity before averaging. It takes
981                      the time of the first spectrum as reference time.
982        Example:
983            # time average the scantable without using a mask
984            newscan = scan.average_time()
985        """
986        varlist = vars()
987        weight = weight or 'TINT'
988        mask = mask or ()
989        scanav = (scanav and 'SCAN') or 'NONE'
990        scan = (self, )
991        try:
992            if align:
993                scan = (self.freq_align(insitu=False), )
994            s = None
995            if weight.upper() == 'MEDIAN':
996                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
997                                                         scanav))
998            else:
999                s = scantable(self._math._average(scan, mask, weight.upper(),
1000                              scanav))
1001        except RuntimeError, msg:
1002            if rcParams['verbose']:
1003                print msg
1004                return
1005            else: raise
1006        s._add_history("average_time", varlist)
1007        return s
1008
1009    @print_log_dec
1010    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1011        """
1012        Return a scan where all spectra are converted to either
1013        Jansky or Kelvin depending upon the flux units of the scan table.
1014        By default the function tries to look the values up internally.
1015        If it can't find them (or if you want to over-ride), you must
1016        specify EITHER jyperk OR eta (and D which it will try to look up
1017        also if you don't set it). jyperk takes precedence if you set both.
1018        Parameters:
1019            jyperk:      the Jy / K conversion factor
1020            eta:         the aperture efficiency
1021            d:           the geometric diameter (metres)
1022            insitu:      if False a new scantable is returned.
1023                         Otherwise, the scaling is done in-situ
1024                         The default is taken from .asaprc (False)
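        Example (usage sketch; the numbers are illustrative only):
            # convert using an explicit Jy/K factor
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
            # or let the factor be derived from eta and the dish diameter
            jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)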
1025        """
1026        if insitu is None: insitu = rcParams['insitu']
1027        self._math._setinsitu(insitu)
1028        varlist = vars()
1029        jyperk = jyperk or -1.0
1030        d = d or -1.0
1031        eta = eta or -1.0
1032        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1033        s._add_history("convert_flux", varlist)
1034        if insitu: self._assign(s)
1035        else: return s
1036
1037    @print_log_dec
1038    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1039        """
1040        Return a scan after applying a gain-elevation correction.
1041        The correction can be made via either a polynomial or a
1042        table-based interpolation (and extrapolation if necessary).
1043        You specify polynomial coefficients, an ascii table or neither.
1044        If you specify neither, then a polynomial correction will be made
1045        with built in coefficients known for certain telescopes (an error
1046        will occur if the instrument is not known).
1047        The data and Tsys are *divided* by the scaling factors.
1048        Parameters:
1049            poly:        Polynomial coefficients (default None) to compute a
1050                         gain-elevation correction as a function of
1051                         elevation (in degrees).
1052            filename:    The name of an ascii file holding correction factors.
1053                         The first row of the ascii file must give the column
1054                         names and these MUST include columns
1055                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1056                         by this) somewhere.
1057                         The second row must give the data type of the
1058                         column. Use 'R' for Real and 'I' for Integer.
1059                         An example file would be
1060                         (actual factors are arbitrary) :
1061
1062                         TIME ELEVATION FACTOR
1063                         R R R
1064                         0.1 0 0.8
1065                         0.2 20 0.85
1066                         0.3 40 0.9
1067                         0.4 60 0.85
1068                         0.5 80 0.8
1069                         0.6 90 0.75
1070            method:      Interpolation method when correcting from a table.
1071                         Values are  "nearest", "linear" (default), "cubic"
1072                         and "spline"
1073            insitu:      if False a new scantable is returned.
1074                         Otherwise, the scaling is done in-situ
1075                         The default is taken from .asaprc (False)
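        Example (usage sketch; 'gainel.txt' is a hypothetical table in the
        format described above):
            # use the built-in coefficients for the current instrument
            corrected = scan.gain_el(insitu=False)
            # or interpolate correction factors from an ascii table
            corrected = scan.gain_el(filename='gainel.txt', insitu=False)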
1076        """
1077
1078        if insitu is None: insitu = rcParams['insitu']
1079        self._math._setinsitu(insitu)
1080        varlist = vars()
1081        poly = poly or ()
1082        from os.path import expandvars
1083        filename = expandvars(filename)
1084        s = scantable(self._math._gainel(self, poly, filename, method))
1085        s._add_history("gain_el", varlist)
1086        if insitu:
1087            self._assign(s)
1088        else:
1089            return s
1090
1091    @print_log_dec
1092    def freq_align(self, reftime=None, method='cubic', insitu=None):
1093        """
1094        Return a scan where all rows have been aligned in frequency/velocity.
1095        The alignment frequency frame (e.g. LSRK) is that set by function
1096        set_freqframe.
1097        Parameters:
1098            reftime:     reference time to align at. By default, the time of
1099                         the first row of data is used.
1100            method:      Interpolation method for regridding the spectra.
1101                         Choose from "nearest", "linear", "cubic" (default)
1102                         and "spline"
1103            insitu:      if False a new scantable is returned.
1104                         Otherwise, the scaling is done in-situ
1105                         The default is taken from .asaprc (False)
1106        """
1107        if insitu is None: insitu = rcParams["insitu"]
1108        self._math._setinsitu(insitu)
1109        varlist = vars()
1110        reftime = reftime or ""
1111        s = scantable(self._math._freq_align(self, reftime, method))
1112        s._add_history("freq_align", varlist)
1113        if insitu: self._assign(s)
1114        else: return s
1115
1116    @print_log_dec
1117    def opacity(self, tau, insitu=None):
1118        """
1119        Apply an opacity correction. The data
1120        and Tsys are multiplied by the correction factor.
1121        Parameters:
1122            tau:         (list of) opacity from which the correction factor is
1123                         exp(tau*ZD)
1124                         where ZD is the zenith-distance.
1125                         If a list is provided, it has to be of length nIF,
1126                         nIF*nPol or 1 and in order of IF/POL, e.g.
1127                         [opif0pol0, opif0pol1, opif1pol0 ...]
1128            insitu:      if False a new scantable is returned.
1129                         Otherwise, the scaling is done in-situ
1130                         The default is taken from .asaprc (False)
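        Example (usage sketch; the opacity value is illustrative only):
            corrected = scan.opacity(0.05, insitu=False)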
1131        """
1132        if insitu is None: insitu = rcParams['insitu']
1133        self._math._setinsitu(insitu)
1134        varlist = vars()
1135        if not hasattr(tau, "__len__"):
1136            tau = [tau]
1137        s = scantable(self._math._opacity(self, tau))
1138        s._add_history("opacity", varlist)
1139        if insitu: self._assign(s)
1140        else: return s
1141
1142    @print_log_dec
1143    def bin(self, width=5, insitu=None):
1144        """
1145        Return a scan where all spectra have been binned up.
1146        Parameters:
1147            width:       The bin width (default=5) in pixels
1148            insitu:      if False a new scantable is returned.
1149                         Otherwise, the scaling is done in-situ
1150                         The default is taken from .asaprc (False)
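        Example (usage sketch; assumes 'scan' is an existing scantable):
            # bin up by 10 channels, returning a new scantable
            binned = scan.bin(width=10, insitu=False)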
1151        """
1152        if insitu is None: insitu = rcParams['insitu']
1153        self._math._setinsitu(insitu)
1154        varlist = vars()
1155        s = scantable(self._math._bin(self, width))
1156        s._add_history("bin", varlist)
1157        if insitu:
1158            self._assign(s)
1159        else:
1160            return s
1161
1162    @print_log_dec
1163    def resample(self, width=5, method='cubic', insitu=None):
1164        """
1165        Return a scan where all spectra have been binned up.
1166
1167        Parameters:
1168            width:       The bin width (default=5) in pixels
1169            method:      Interpolation method to use when resampling.
1170                         Values are "nearest", "linear", "cubic" (default)
1171                         and "spline"
1172            insitu:      if False a new scantable is returned.
1173                         Otherwise, the scaling is done in-situ
1174                         The default is taken from .asaprc (False)
1175        """
1176        if insitu is None: insitu = rcParams['insitu']
1177        self._math._setinsitu(insitu)
1178        varlist = vars()
1179        s = scantable(self._math._resample(self, method, width))
1180        s._add_history("resample", varlist)
1181        if insitu: self._assign(s)
1182        else: return s
1183
1184    @print_log_dec
1185    def average_pol(self, mask=None, weight='none'):
1186        """
1187        Average the Polarisations together.
1188        Parameters:
1189            mask:        An optional mask defining the region, where the
1190                         averaging will be applied. The output will have all
1191                         specified points masked.
1192            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1193                         weighted), or 'tsys' (1/Tsys**2 weighted)
1194        """
1195        varlist = vars()
1196        mask = mask or ()
1197        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1198        s._add_history("average_pol", varlist)
1199        return s
1200
1201    @print_log_dec
1202    def average_beam(self, mask=None, weight='none'):
1203        """
1204        Average the Beams together.
1205        Parameters:
1206            mask:        An optional mask defining the region, where the
1207                         averaging will be applied. The output will have all
1208                         specified points masked.
1209            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1210                         weighted), or 'tsys' (1/Tsys**2 weighted)
1211        """
1212        varlist = vars()
1213        mask = mask or ()
1214        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1215        s._add_history("average_beam", varlist)
1216        return s
1217
1218    def parallactify(self, pflag):
1219        """
1220        Set a flag to indicate whether this data should be treated as having
1221        been 'parallactified' (total phase == 0.0)
1222        Parameters:
1223            pflag:  Bool indicating whether to turn this on (True) or
1224                    off (False)
1225        """
1226        varlist = vars()
1227        self._parallactify(pflag)
1228        self._add_history("parallactify", varlist)
1229
1230    @print_log_dec
1231    def convert_pol(self, poltype=None):
1232        """
1233        Convert the data to a different polarisation type.
1234        Note that you will need cross-polarisation terms for most conversions.
1235        Parameters:
1236            poltype:    The new polarisation type. Valid types are:
1237                        "linear", "circular", "stokes" and "linpol"
1238        """
1239        varlist = vars()
1240        try:
1241            s = scantable(self._math._convertpol(self, poltype))
1242        except RuntimeError, msg:
1243            if rcParams['verbose']:
1244                print msg
1245                return
1246            else:
1247                raise
1248        s._add_history("convert_pol", varlist)
1249        return s
1250
1251    @print_log_dec
1252    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1253        """
1254        Smooth the spectrum by the specified kernel (conserving flux).
1255        Parameters:
1256            kernel:     The type of smoothing kernel. Select from
1257                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1258                        or 'poly'
1259            width:      The width of the kernel in pixels. For hanning this is
1260                        ignored, otherwise it defaults to 5 pixels.
1261                        For 'gaussian' it is the Full Width Half
1262                        Maximum. For 'boxcar' it is the full width.
1263                        For 'rmedian' and 'poly' it is the half width.
1264            order:      Optional parameter for 'poly' kernel (default is 2), to
1265                        specify the order of the polynomial. Ignored by all other
1266                        kernels.
1267            insitu:     if False a new scantable is returned.
1268                        Otherwise, the scaling is done in-situ
1269                        The default is taken from .asaprc (False)
1270        Example:
1271             none
1272        """
1273        if insitu is None: insitu = rcParams['insitu']
1274        self._math._setinsitu(insitu)
1275        varlist = vars()
1276        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1277        s._add_history("smooth", varlist)
1278        if insitu: self._assign(s)
1279        else: return s
1280
1281    @print_log_dec
1282    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1283                      insitu=None):
1284        """
1285        Return a scan which has been baselined (all rows) by a polynomial.
1286        Parameters:
1287            mask:       an optional mask
1288            order:      the order of the polynomial (default is 0)
1289            plot:       plot the fit and the residual. In this mode each
1290                        individual fit has to be approved, by typing 'y'
1291                        or 'n'
1292            uselin:     use linear polynomial fit
1293            insitu:     if False a new scantable is returned.
1294                        Otherwise, the scaling is done in-situ
1295                        The default is taken from .asaprc (False)
1296        Example:
1297            # return a scan baselined by a third order polynomial,
1298            # not using a mask
1299            bscan = scan.poly_baseline(order=3)
1300        """
1301        if insitu is None: insitu = rcParams['insitu']
1302        varlist = vars()
1303        if mask is None:
1304            mask = [True for i in xrange(self.nchan(-1))]
1305        from asap.asapfitter import fitter
1306        try:
1307            f = fitter()
1308            f.set_scan(self, mask)
1309            #f.set_function(poly=order)
1310            if uselin:
1311                f.set_function(lpoly=order)
1312            else:
1313                f.set_function(poly=order)
1314            s = f.auto_fit(insitu, plot=plot)
1315            s._add_history("poly_baseline", varlist)
1316            if insitu: self._assign(s)
1317            else: return s
1318        except RuntimeError:
1319            msg = "The fit failed, possibly because it didn't converge."
1320            if rcParams['verbose']:
1321                print msg
1322                return
1323            else:
1324                raise RuntimeError(msg)
1325
1326    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1327                           threshold=3, chan_avg_limit=1, plot=False,
1328                           insitu=None):
1329        """
1330        Return a scan which has been baselined (all rows) by a polynomial.
1331        Spectral lines are detected first using linefinder and masked out
1332        to avoid them affecting the baseline solution.
1333
1334        Parameters:
1335            mask:       an optional mask retrieved from scantable
1336            edge:       an optional number of channels to drop at
1337                        the edge of the spectrum. If only one value is
1338                        specified, the same number will be dropped from
1339                        both sides of the spectrum. Default is to keep
1340                        all channels. Nested tuples represent individual
1341                        edge selection for different IFs (a number of spectral
1342                        channels can be different)
1343            order:      the order of the polynomial (default is 0)
1344            threshold:  the threshold used by line finder. It is better to
1345                        keep it large as only strong lines affect the
1346                        baseline solution.
1347            chan_avg_limit:
1348                        a maximum number of consecutive spectral channels to
1349                        average during the search of weak and broad lines.
1350                        The default is no averaging (and no search for weak
1351                        lines). If such lines can affect the fitted baseline
1352                        (e.g. a high order polynomial is fitted), increase this
1353                        parameter (usually values up to 8 are reasonable). Most
1354                        users of this method should find the default value
1355                        sufficient.
1356            plot:       plot the fit and the residual. In this case each
1357                        individual fit has to be approved by typing 'y'
1358                        or 'n'
1359            insitu:     if False a new scantable is returned.
1360                        Otherwise, the baselining is done in-situ
1361                        The default is taken from .asaprc (False)
1362
1363        Example:
1364            scan2=scan.auto_poly_baseline(order=7)
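            # Illustrative sketch only (channel counts are placeholders):
            # drop 50 channels from both edges of every spectrum
            scan3 = scan.auto_poly_baseline(order=3, edge=(50, 50))
            # per-IF edge selection via nested tuples (assumes two IFs)
            scan4 = scan.auto_poly_baseline(order=3, edge=((50, 50), (80, 80)))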
1365        """
1366        if insitu is None: insitu = rcParams['insitu']
1367        varlist = vars()
1368        from asap.asapfitter import fitter
1369        from asap.asaplinefind import linefinder
1370        from asap import _is_sequence_or_number as _is_valid
1371
1372        # check whether edge is set up for each IF individually
1373        individualedge = False
1374        if len(edge) > 1:
1375            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1376                individualedge = True
1377
1378        if not _is_valid(edge, int) and not individualedge:
1379            raise ValueError("Parameter 'edge' has to be an integer or a "
1380                             "pair of integers specified as a tuple. Nested tuples "
1381                             "are allowed to make individual selection for different IFs.")
1382
1383        curedge = (0, 0)
1384        if individualedge:
1385            for edgepar in edge:
1386                if not _is_valid(edgepar, int):
1387                    raise ValueError("Each element of the 'edge' tuple has "
1388                                     "to be a pair of integers or an integer.")
1389        else:
1390            curedge = edge
1391
1392        # setup fitter
1393        f = fitter()
1394        f.set_function(poly=order)
1395
1396        # setup line finder
1397        fl = linefinder()
1398        fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
1399
1400        if not insitu:
1401            workscan = self.copy()
1402        else:
1403            workscan = self
1404
1405        fl.set_scan(workscan)
1406
1407        rows = range(workscan.nrow())
1408        asaplog.push("Processing:")
1409        for r in rows:
1410            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1411                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1412                 workscan.getpol(r), workscan.getcycle(r))
1413            asaplog.push(msg, False)
1414
1415            # figure out edge parameter
1416            if individualedge:
1417                if len(edge) <= workscan.getif(r):
1418                    raise RuntimeError("Number of edge elements appears to "
1419                                       "be less than the number of IFs")
1420                curedge = edge[workscan.getif(r)]
1421
1422            # setup line finder
1423            fl.find_lines(r, mask, curedge)
1424            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1425                        mask_and(workscan._getmask(r), fl.get_mask()))
1426            f.fit()
1427            x = f.get_parameters()
1428            if plot:
1429                f.plot(residual=True)
1430                x = raw_input("Accept fit ( [y]/n ): ")
1431                if x.upper() == 'N':
1432                    continue
1433            workscan._setspectrum(f.fitter.getresidual(), r)
1434        if plot:
1435            f._p.unmap()
1436            f._p = None
1437        workscan._add_history("auto_poly_baseline", varlist)
1438        if insitu:
1439            self._assign(workscan)
1440        else:
1441            return workscan
1442
1443    @print_log_dec
1444    def rotate_linpolphase(self, angle):
1445        """
1446        Rotate the phase of the complex polarization O=Q+iU correlation.
1447        This is always done in situ in the raw data.  So if you call this
1448        function more than once then each call rotates the phase further.
1449        Parameters:
1450            angle:   The angle (degrees) to rotate (add) by.
1451        Examples:
1452            scan.rotate_linpolphase(2.3)
1453        """
1454        varlist = vars()
1455        self._math._rotate_linpolphase(self, angle)
1456        self._add_history("rotate_linpolphase", varlist)
1457        return
1458
1459    @print_log_dec
1460    def rotate_xyphase(self, angle):
1461        """
1462        Rotate the phase of the XY correlation.  This is always done in situ
1463        in the data.  So if you call this function more than once
1464        then each call rotates the phase further.
1465        Parameters:
1466            angle:   The angle (degrees) to rotate (add) by.
1467        Examples:
1468            scan.rotate_xyphase(2.3)
1469        """
1470        varlist = vars()
1471        self._math._rotate_xyphase(self, angle)
1472        self._add_history("rotate_xyphase", varlist)
1473        return
1474
1475    @print_log_dec
1476    def swap_linears(self):
1477        """
1478        Swap the linear polarisations XX and YY, or more precisely the
1479        first two polarisations, as this also works for circulars.
1480        """
1481        varlist = vars()
1482        self._math._swap_linears(self)
1483        self._add_history("swap_linears", varlist)
1484        return
1485
1486    @print_log_dec
1487    def invert_phase(self):
1488        """
1489        Invert the phase of the complex polarisation
1490        """
1491        varlist = vars()
1492        self._math._invert_phase(self)
1493        self._add_history("invert_phase", varlist)
1494        return
1495
1496    @print_log_dec
1497    def add(self, offset, insitu=None):
1498        """
1499        Return a scan where all spectra have the offset added
1500        Parameters:
1501            offset:      the offset
1502            insitu:      if False a new scantable is returned.
1503                         Otherwise, the addition is done in-situ
1504                         The default is taken from .asaprc (False)
1505        """
1506        if insitu is None: insitu = rcParams['insitu']
1507        self._math._setinsitu(insitu)
1508        varlist = vars()
1509        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1510        s._add_history("add", varlist)
1511        if insitu:
1512            self._assign(s)
1513        else:
1514            return s
1515
1516    @print_log_dec
1517    def scale(self, factor, tsys=True, insitu=None):
1518        """
1519        Return a scan where all spectra are scaled by the given 'factor'
1520        Parameters:
1521            factor:      the scaling factor
1522            insitu:      if False a new scantable is returned.
1523                         Otherwise, the scaling is done in-situ
1524                         The default is taken from .asaprc (False)
1525            tsys:        if True (default) then apply the operation to Tsys
1526                         as well as the data
1527        """
1528        if insitu is None: insitu = rcParams['insitu']
1529        self._math._setinsitu(insitu)
1530        varlist = vars()
1531        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1532        s._add_history("scale", varlist)
1533        if insitu:
1534            self._assign(s)
1535        else:
1536            return s
1537
1538    def set_sourcetype(self, match, matchtype="pattern",
1539                       sourcetype="reference"):
1540        """
1541        Set the type of the source to be a source or reference scan
1542        using the provided pattern.
1543        Parameters:
1544            match:          a Unix style pattern, regular expression or selector
1545            matchtype:      'pattern' (default) UNIX style pattern or
1546                            'regex' regular expression
1547            sourcetype:     the type of the source to use (source/reference)
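        Example:
            # Illustrative sketch only; the source-name pattern is a
            # placeholder for your own off-source naming convention
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")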
1548        """
1549        varlist = vars()
1550        basesel = self.get_selection()
1551        stype = -1
1552        if sourcetype.lower().startswith("r"):
1553            stype = 1
1554        elif sourcetype.lower().startswith("s"):
1555            stype = 0
1556        else:
1557            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1558        if matchtype.lower().startswith("p"):
1559            matchtype = "pattern"
1560        elif matchtype.lower().startswith("r"):
1561            matchtype = "regex"
1562        else:
1563            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1564        sel = selector()
1565        if isinstance(match, selector):
1566            sel = match
1567        else:
1568            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1569        self.set_selection(basesel+sel)
1570        self._setsourcetype(stype)
1571        self.set_selection(basesel)
1572        self._add_history("set_sourcetype", varlist)
1573
1574    @print_log_dec
1575    def auto_quotient(self, preserve=True, mode='paired'):
1576        """
1577        This function allows you to build quotients automatically.
1578        It assumes the observation to have the same number of
1579        "ons" and "offs".
1580        Parameters:
1581            preserve:       you can preserve (default) the continuum or
1582                            remove it.  The equations used are
1583                            preserve: Output = Toff * (on/off) - Toff
1584                            remove:   Output = Toff * (on/off) - Ton
1585            mode:           the on/off detection mode
1586                            'paired' (default)
1587                            identifies 'off' scans by the
1588                            trailing '_R' (Mopra/Parkes) or
1589                            '_e'/'_w' (Tid) and matches
1590                            on/off pairs from the observing pattern
1591                            'time'
1592                            finds the closest off in time
1593
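        Example:
            # Illustrative sketch only; assumes the scantable contains
            # matched on/off scans tagged by the observing system
            q = scan.auto_quotient(preserve=True, mode='paired')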
1594        """
1595        modes = ["time", "paired"]
1596        if not mode in modes:
1597            msg = "please provide valid mode. Valid modes are %s" % (modes)
1598            raise ValueError(msg)
1599        varlist = vars()
1600        s = None
1601        if mode.lower() == "paired":
1602            basesel = self.get_selection()
1603            sel = selector()+basesel
1604            sel.set_query("SRCTYPE==1")
1605            self.set_selection(sel)
1606            offs = self.copy()
1607            sel.set_query("SRCTYPE==0")
1608            self.set_selection(sel)
1609            ons = self.copy()
1610            s = scantable(self._math._quotient(ons, offs, preserve))
1611            self.set_selection(basesel)
1612        elif mode.lower() == "time":
1613            s = scantable(self._math._auto_quotient(self, mode, preserve))
1614        s._add_history("auto_quotient", varlist)
1615        return s
1616
1617    @print_log_dec
1618    def mx_quotient(self, mask = None, weight='median', preserve=True):
1619        """
1620        Form a quotient using "off" beams when observing in "MX" mode.
1621        Parameters:
1622            mask:           an optional mask to be used when weight == 'stddev'
1623            weight:         How to average the off beams.  Default is 'median'.
1624            preserve:       you can preserve (default) the continuum or
1625                            remove it.  The equations used are
1626                            preserve: Output = Toff * (on/off) - Toff
1627                            remove:   Output = Toff * (on/off) - Ton
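        Example:
            # Illustrative sketch only; assumes the data were taken in
            # MX (beam-switching) mode
            q = scan.mx_quotient(weight='median', preserve=True)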
1628        """
1629        mask = mask or ()
1630        varlist = vars()
1631        on = scantable(self._math._mx_extract(self, 'on'))
1632        preoff = scantable(self._math._mx_extract(self, 'off'))
1633        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1634        from asapmath  import quotient
1635        q = quotient(on, off, preserve)
1636        q._add_history("mx_quotient", varlist)
1637        return q
1638
1639    @print_log_dec
1640    def freq_switch(self, insitu=None):
1641        """
1642        Apply frequency switching to the data.
1643        Parameters:
1644            insitu:      if False a new scantable is returned.
1645                         Otherwise, the switching is done in-situ
1646                         The default is taken from .asaprc (False)
1647        Example:
1648            none
1649        """
1650        if insitu is None: insitu = rcParams['insitu']
1651        self._math._setinsitu(insitu)
1652        varlist = vars()
1653        s = scantable(self._math._freqswitch(self))
1654        s._add_history("freq_switch", varlist)
1655        if insitu: self._assign(s)
1656        else: return s
1657
1658    @print_log_dec
1659    def recalc_azel(self):
1660        """
1661        Recalculate the azimuth and elevation for each position.
1662        Parameters:
1663            none
1664        Example:
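            # illustrative: recompute az/el for all rows
            scan.recalc_azel()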
1665        """
1666        varlist = vars()
1667        self._recalcazel()
1668        self._add_history("recalc_azel", varlist)
1669        return
1670
1671    @print_log_dec
1672    def __add__(self, other):
1673        varlist = vars()
1674        s = None
1675        if isinstance(other, scantable):
1676            s = scantable(self._math._binaryop(self, other, "ADD"))
1677        elif isinstance(other, float):
1678            s = scantable(self._math._unaryop(self, other, "ADD", False))
1679        else:
1680            raise TypeError("Other input is not a scantable or float value")
1681        s._add_history("operator +", varlist)
1682        return s
1683
1684    @print_log_dec
1685    def __sub__(self, other):
1686        """
1687        implicit on all axes and on Tsys
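        Example (illustrative; assumes sig and ref are compatible scantables):
            diff = sig - ref       # channel-by-channel difference
            lowered = sig - 1.5    # subtract a constant from every spectrum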
1688        """
1689        varlist = vars()
1690        s = None
1691        if isinstance(other, scantable):
1692            s = scantable(self._math._binaryop(self, other, "SUB"))
1693        elif isinstance(other, float):
1694            s = scantable(self._math._unaryop(self, other, "SUB", False))
1695        else:
1696            raise TypeError("Other input is not a scantable or float value")
1697        s._add_history("operator -", varlist)
1698        return s
1699
1700    @print_log_dec
1701    def __mul__(self, other):
1702        """
1703        implicit on all axes and on Tsys
1704        """
1705        varlist = vars()
1706        s = None
1707        if isinstance(other, scantable):
1708            s = scantable(self._math._binaryop(self, other, "MUL"))
1709        elif isinstance(other, float):
1710            s = scantable(self._math._unaryop(self, other, "MUL", False))
1711        else:
1712            raise TypeError("Other input is not a scantable or float value")
1713        s._add_history("operator *", varlist)
1714        return s
1715
1716
1717    @print_log_dec
1718    def __div__(self, other):
1719        """
1720        implicit on all axes and on Tsys
1721        """
1722        varlist = vars()
1723        s = None
1724        if isinstance(other, scantable):
1725            s = scantable(self._math._binaryop(self, other, "DIV"))
1726        elif isinstance(other, float):
1727            if other == 0.0:
1728                raise ZeroDivisionError("Dividing by zero is not recommended")
1729            s = scantable(self._math._unaryop(self, other, "DIV", False))
1730        else:
1731            raise TypeError("Other input is not a scantable or float value")
1732        s._add_history("operator /", varlist)
1733        return s
1734
1735    def get_fit(self, row=0):
1736        """
1737        Print or return the stored fits for a row in the scantable
1738        Parameters:
1739            row:    the row to which the fit has been applied.
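        Example:
            # illustrative; requires that a fit has previously been stored
            # for the row, e.g. by the fitter
            scan.get_fit(0)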
1740        """
1741        if row >= self.nrow():
1742            return
1743        from asap.asapfit import asapfit
1744        fit = asapfit(self._getfit(row))
1745        if rcParams['verbose']:
1746            print fit
1747            return
1748        else:
1749            return fit.as_dict()
1750
1751    def flag_nans(self):
1752        """
1753        Utility function to flag NaN values in the scantable.
1754        """
1755        import numpy
1756        basesel = self.get_selection()
1757        for i in range(self.nrow()):
1758            sel = self.get_row_selector(i)
1759            self.set_selection(basesel+sel)
1760            nans = numpy.isnan(self._getspectrum(0))
1761            if numpy.any(nans):
1762                bnans = [ bool(v) for v in nans]
1763                self.flag(bnans)
1764        self.set_selection(basesel)
1765
1766    def get_row_selector(self, rowno):
1767        return selector(beams=self.getbeam(rowno),
1768                        ifs=self.getif(rowno),
1769                        pols=self.getpol(rowno),
1770                        scans=self.getscan(rowno),
1771                        cycles=self.getcycle(rowno))
1772
1773    def _add_history(self, funcname, parameters):
1774        if not rcParams['scantable.history']:
1775            return
1776        # create date
1777        sep = "##"
1778        from datetime import datetime
1779        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1780        hist = dstr+sep
1781        hist += funcname+sep#cdate+sep
1782        if parameters.has_key('self'): del parameters['self']
1783        for k, v in parameters.iteritems():
1784            if type(v) is dict:
1785                for k2, v2 in v.iteritems():
1786                    hist += k2
1787                    hist += "="
1788                    if isinstance(v2, scantable):
1789                        hist += 'scantable'
1790                    elif k2 == 'mask':
1791                        if isinstance(v2, list) or isinstance(v2, tuple):
1792                            hist += str(self._zip_mask(v2))
1793                        else:
1794                            hist += str(v2)
1795                    else:
1796                        hist += str(v2)
1797            else:
1798                hist += k
1799                hist += "="
1800                if isinstance(v, scantable):
1801                    hist += 'scantable'
1802                elif k == 'mask':
1803                    if isinstance(v, list) or isinstance(v, tuple):
1804                        hist += str(self._zip_mask(v))
1805                    else:
1806                        hist += str(v)
1807                else:
1808                    hist += str(v)
1809            hist += sep
1810        hist = hist[:-2] # remove trailing '##'
1811        self._addhistory(hist)
1812
1813
1814    def _zip_mask(self, mask):
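        # Collapse a boolean channel mask into [start, end) index pairs,
        # one pair per contiguous run of selected (True) channels.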
1815        mask = list(mask)
1816        i = 0
1817        segments = []
1818        while mask[i:].count(1):
1819            i += mask[i:].index(1)
1820            if mask[i:].count(0):
1821                j = i + mask[i:].index(0)
1822            else:
1823                j = len(mask)
1824            segments.append([i, j])
1825            i = j
1826        return segments
1827
1828    def _get_ordinate_label(self):
1829        fu = "("+self.get_fluxunit()+")"
1830        import re
1831        lbl = "Intensity"
1832        if re.match(".K.", fu):
1833            lbl = "Brightness Temperature "+ fu
1834        elif re.match(".Jy.", fu):
1835            lbl = "Flux density "+ fu
1836        return lbl
1837
1838    def _check_ifs(self):
1839        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1840        nchans = filter(lambda t: t > 0, nchans)
1841        return (sum(nchans)/len(nchans) == nchans[0])
1842
1843    def _fill(self, names, unit, average):
1844        import os
1845        from asap._asap import stfiller
1846        first = True
1847        fullnames = []
1848        for name in names:
1849            name = os.path.expandvars(name)
1850            name = os.path.expanduser(name)
1851            if not os.path.exists(name):
1852                msg = "File '%s' does not exist" % (name)
1853                if rcParams['verbose']:
1854                    asaplog.push(msg)
1855                    print asaplog.pop().strip()
1856                    return
1857                raise IOError(msg)
1858            fullnames.append(name)
1859        if average:
1860            asaplog.push('Auto averaging integrations')
1861        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1862        for name in fullnames:
1863            tbl = Scantable(stype)
1864            r = stfiller(tbl)
1865            rx = rcParams['scantable.reference']
1866            r._setreferenceexpr(rx)
1867            msg = "Importing %s..." % (name)
1868            asaplog.push(msg, False)
1869            print_log()
1870            r._open(name, -1, -1)
1871            r._read()
1872            if average:
1873                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1874            if not first:
1875                tbl = self._math._merge([self, tbl])
1876            Scantable.__init__(self, tbl)
1877            r._close()
1878            del r, tbl
1879            first = False
1880        if unit is not None:
1881            self.set_fluxunit(unit)
1882        self.set_freqframe(rcParams['scantable.freqframe'])
1883
1884    def __getitem__(self, key):
1885        if key < 0:
1886            key += self.nrow()
1887        if key >= self.nrow():
1888            raise IndexError("Row index out of range.")
1889        return self._getspectrum(key)
1890
1891    def __setitem__(self, key, value):
1892        if key < 0:
1893            key += self.nrow()
1894        if key >= self.nrow():
1895            raise IndexError("Row index out of range.")
1896        if not hasattr(value, "__len__") or \
1897                len(value) > self.nchan(self.getif(key)):
1898            raise ValueError("Spectrum length doesn't match.")
1899        return self._setspectrum(value, key)
1900
1901    def __len__(self):
1902        return self.nrow()
1903
1904    def __iter__(self):
1905        for i in range(len(self)):
1906            yield self[i]
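
    # Illustrative sketch of the sequence protocol defined above
    # (assumes 'scan' is an existing scantable):
    #     nrows = len(scan)       # number of rows
    #     first = scan[0]         # spectrum of the first row
    #     for sp in scan:         # iterate over all row spectra
    #         print max(sp)
    #     scan[0] = first         # write a spectrum back into row 0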