source: trunk/python/scantable.py @ 1689

Last change on this file since 1689 was 1689, checked in by Malte Marquarding, 14 years ago

Ticket #177: added skydip function to determine opacities. This resulted in a slight interface change to accept lists of opacities in the scantable.opacity function

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.9 KB
1import functools
2from asap._asap import Scantable
3from asap import rcParams
4from asap import print_log, print_log_dec
5from asap import asaplog
6from asap import selector
7from asap import linecatalog
8from asap.coordinate import coordinate
9from asap import _n_bools, mask_not, mask_and, mask_or
10
11
12def preserve_selection(func):
13    @functools.wraps(func)
14    def wrap(obj, *args, **kw):
15        basesel = obj.get_selection()
16        val = func(obj, *args, **kw)
17        obj.set_selection(basesel)
18        return val
19    return wrap
20
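# Illustrative note: a method wrapped with @preserve_selection may change the
# scantable's selection internally; the decorator saves the caller's selection
# first and restores it on return, e.g. (hypothetical function for illustration)
#
#   @preserve_selection
#   def example(scan):
#       scan.set_selection(selector(scans=[0]))
#       ...  # the caller's selection is restored after this returns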
21
22class scantable(Scantable):
23    """
24        The ASAP container for scans
25    """
26    @print_log_dec
27    def __init__(self, filename, average=None, unit=None, parallactify=None):
28        """
29        Create a scantable from a saved one or make a reference
30        Parameters:
31            filename:    the name of an asap table on disk
32                         or
33                         the name of a rpfits/sdfits/ms file
34                         (integrations within scans are auto averaged
35                         and the whole file is read)
36                         or
37                         [advanced] a reference to an existing
38                         scantable
39            average:     average all integrations within a scan on read.
40                         The default (True) is taken from .asaprc.
41            unit:         brightness unit; must be consistent with K or Jy.
42                         Over-rides the default selected by the reader
43                         (input rpfits/sdfits/ms) or replaces the value
44                         in existing scantables
45            parallactify: Indicate that the data have been parallactified.
46                          Default is taken from the rc file.
47        """
48        if average is None:
49            average = rcParams['scantable.autoaverage']
50        parallactify = parallactify or rcParams['scantable.parallactify']
51        varlist = vars()
52        from asap._asap import stmath
53        self._math = stmath()
54        if isinstance(filename, Scantable):
55            Scantable.__init__(self, filename)
56        else:
57            if isinstance(filename, str):# or \
58#                (isinstance(filename, list) or isinstance(filename, tuple)) \
59#                  and isinstance(filename[-1], str):
60                import os.path
61                filename = os.path.expandvars(filename)
62                filename = os.path.expanduser(filename)
63                if not os.path.exists(filename):
64                    s = "File '%s' not found." % (filename)
65                    if rcParams['verbose']:
66                        asaplog.push(s)
67                        #print asaplog.pop().strip()
68                        return
69                    raise IOError(s)
70                if os.path.isdir(filename) \
71                    and not os.path.exists(filename+'/table.f1'):
72                    # crude check if asap table
73                    if os.path.exists(filename+'/table.info'):
74                        ondisk = rcParams['scantable.storage'] == 'disk'
75                        Scantable.__init__(self, filename, ondisk)
76                        if unit is not None:
77                            self.set_fluxunit(unit)
78                        self.set_freqframe(rcParams['scantable.freqframe'])
79                    else:
80                        msg = "The given file '%s' is not a valid " \
81                              "asap table." % (filename)
82                        if rcParams['verbose']:
83                            print msg
84                            return
85                        else:
86                            raise IOError(msg)
87                else:
88                    self._fill([filename], unit, average)
89            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
90                  and isinstance(filename[-1], str):
91                self._fill(filename, unit, average)
92        self.parallactify(parallactify)
93        self._add_history("scantable", varlist)
94
95    @print_log_dec
96    def save(self, name=None, format=None, overwrite=False):
97        """
98        Store the scantable on disk. This can be an asap (aips++) Table,
99        SDFITS or MS2 format.
100        Parameters:
101            name:        the name of the outputfile. For format "ASCII"
102                         this is the root file name (data in 'name'.txt
103                         and header in 'name'_header.txt)
104            format:      an optional file format. Default is ASAP.
105                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
106                                       'SDFITS' (save as SDFITS file)
107                                       'ASCII' (saves as ascii text file)
108                                       'MS2' (saves as an aips++
109                                              MeasurementSet V2)
110                                       'FITS' (save as image FITS - not
111                                               readable by class)
112                                       'CLASS' (save as FITS readable by CLASS)
113            overwrite:   If the file should be overwritten if it exists.
114                         The default False is to return with warning
115                         without writing the output. USE WITH CARE.
116        Example:
117            scan.save('myscan.asap')
118            scan.save('myscan.sdfits', 'SDFITS')
119        """
120        from os import path
121        format = format or rcParams['scantable.save']
122        suffix = '.'+format.lower()
123        if name is None or name == "":
124            name = 'scantable'+suffix
125            msg = "No filename given. Using default name %s..." % name
126            asaplog.push(msg)
127        name = path.expandvars(name)
128        if path.isfile(name) or path.isdir(name):
129            if not overwrite:
130                msg = "File %s exists." % name
131                if rcParams['verbose']:
132                    print msg
133                    return
134                else:
135                    raise IOError(msg)
136        format2 = format.upper()
137        if format2 == 'ASAP':
138            self._save(name)
139        else:
140            from asap._asap import stwriter as stw
141            writer = stw(format2)
142            writer.write(self, name)
143        return
144
145    def copy(self):
146        """
147        Return a copy of this scantable.
148        Note:
149            This makes a full (deep) copy. scan2 = scan1 makes a reference.
150        Parameters:
151            none
152        Example:
153            copiedscan = scan.copy()
154        """
155        sd = scantable(Scantable._copy(self))
156        return sd
157
158    def drop_scan(self, scanid=None):
159        """
160        Return a new scantable where the specified scan number(s) has(have)
161        been dropped.
162        Parameters:
163            scanid:    a (list of) scan number(s)
164        """
165        from asap import _is_sequence_or_number as _is_valid
166        from asap import _to_list
167        from asap import unique
168        if not _is_valid(scanid):
169            if rcParams['verbose']:
170                print "Please specify a scanno to drop from the scantable"
171                return
172            else:
173                raise RuntimeError("No scan given")
174        try:
175            scanid = _to_list(scanid)
176            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
177            for sid in scanid: allscans.remove(sid)
178            if len(allscans) == 0:
179                raise ValueError("Can't remove all scans")
180        except ValueError:
181            if rcParams['verbose']:
182                print "Couldn't find any match."
183                return
184            else: raise
185        try:
186            sel = selector(scans=allscans)
187            return self._select_copy(sel)
188        except RuntimeError:
189            if rcParams['verbose']:
190                print "Couldn't find any match."
191            else:
192                raise
193
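    # Illustrative usage (scan numbers are arbitrary examples):
    #   newscan = scan.drop_scan([2, 3])    # drop scans 2 and 3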
194    def _select_copy(self, selection):
195        orig = self.get_selection()
196        self.set_selection(orig+selection)
197        cp = self.copy()
198        self.set_selection(orig)
199        return cp
200
201    def get_scan(self, scanid=None):
202        """
203        Return a specific scan (by scanno) or collection of scans (by
204        source name) in a new scantable.
205        Note:
206            See scantable.drop_scan() for the inverse operation.
207        Parameters:
208            scanid:    a (list of) scanno or a source name, unix-style
209                       patterns are accepted for source name matching, e.g.
210                       '*_R' gets all 'ref' scans
211        Example:
212            # get all scans containing the source '323p459'
213            newscan = scan.get_scan('323p459')
214            # get all 'off' scans
215            refscans = scan.get_scan('*_R')
216            # get a subset of scans by scanno (as listed in scan.summary())
217            newscan = scan.get_scan([0, 2, 7, 10])
218        """
219        if scanid is None:
220            if rcParams['verbose']:
221                print "Please specify a scan no or name to " \
222                      "retrieve from the scantable"
223                return
224            else:
225                raise RuntimeError("No scan given")
226
227        try:
228            bsel = self.get_selection()
229            sel = selector()
230            if type(scanid) is str:
231                sel.set_name(scanid)
232                return self._select_copy(sel)
233            elif type(scanid) is int:
234                sel.set_scans([scanid])
235                return self._select_copy(sel)
236            elif type(scanid) is list:
237                sel.set_scans(scanid)
238                return self._select_copy(sel)
239            else:
240                msg = "Illegal scanid type, use 'int' or 'list' of ints."
241                if rcParams['verbose']:
242                    print msg
243                else:
244                    raise TypeError(msg)
245        except RuntimeError:
246            if rcParams['verbose']: print "Couldn't find any match."
247            else: raise
248
249    def __str__(self):
250        return Scantable._summary(self, True)
251
252    def summary(self, filename=None):
253        """
254        Print a summary of the contents of this scantable.
255        Parameters:
256            filename:    the name of a file to write the output to
257                         Default - no file output
258            verbose:     print extra info such as the frequency table
259                         The default (False) is taken from .asaprc
260        """
261        info = Scantable._summary(self, True)
262        if filename is not None:
263            if filename == "":
264                filename = 'scantable_summary.txt'
265            from os.path import expandvars, isdir
266            filename = expandvars(filename)
267            if not isdir(filename):
268                data = open(filename, 'w')
269                data.write(info)
270                data.close()
271            else:
272                msg = "Illegal file name '%s'." % (filename)
273                if rcParams['verbose']:
274                    print msg
275                else:
276                    raise IOError(msg)
277        if rcParams['verbose']:
278            try:
279                from IPython.genutils import page as pager
280            except ImportError:
281                from pydoc import pager
282            pager(info)
283        else:
284            return info
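    # Illustrative usage (file name is an arbitrary example):
    #   scan.summary(filename='mysummary.txt')   # write the summary to a file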
285
286    def get_spectrum(self, rowno):
287        """Return the spectrum for the given row in the scantable as a list.
288        Parameters:
289             rowno:   the row number to retrieve the spectrum from
290        """
291        return self._getspectrum(rowno)
292
293    def get_mask(self, rowno):
294        """Return the mask for the given row in the scantable as a list.
295        Parameters:
296             rowno:   the row number to retrieve the mask from
297        """
298        return self._getmask(rowno)
299
300    def set_spectrum(self, spec, rowno):
301        """Set the spectrum for the given row in the scantable.
302        Parameters:
303             spec:   the spectrum
304             rowno:    the row number to set the spectrum for
305        """
306        assert(len(spec) == self.nchan())
307        return self._setspectrum(spec, rowno)
308
309    def get_coordinate(self, rowno):
310        """Return the (spectral) coordinate for a given 'rowno'.
311        NOTE:
312            * This coordinate is only valid until a scantable method modifies
313              the frequency axis.
314            * This coordinate contains the original frequency set-up,
315              NOT the new frame. The conversions, however, are done using the
316              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
317              use scantable.freq_align first. Without it there is no closure,
318              i.e.
319              c = myscan.get_coordinate(0)
320              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
321
322        Parameters:
323             rowno:    the row number for the spectral coordinate
324
325        """
326        return coordinate(Scantable.get_coordinate(self, rowno))
327
328    def get_selection(self):
329        """
330        Get the selection object currently set on this scantable.
331        Parameters:
332            none
333        Example:
334            sel = scan.get_selection()
335            sel.set_ifs(0)              # select IF 0
336            scan.set_selection(sel)     # apply modified selection
337        """
338        return selector(self._getselection())
339
340    def set_selection(self, selection=None, **kw):
341        """
342        Select a subset of the data. All following operations on this scantable
343        are only applied to this selection.
344        Parameters:
345            selection:    a selector object (default unset the selection)
346        Examples:
347            sel = selector()         # create a selection object
348            sel.set_scans([0, 3])     # select SCANNO 0 and 3
349            scan.set_selection(sel)  # set the selection
350            scan.summary()           # will only print summary of scanno 0 and 3
351            scan.set_selection()     # unset the selection
352        """
353        if selection is None:
354            # reset
355            if len(kw) == 0:
356                selection = selector()
357            else:
358                # try keywords
359                for k in kw:
360                    if k not in selector.fields:
361                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
362                selection = selector(**kw)
363        self._setselection(selection)
364
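    # Illustrative usage: selections can also be passed as keyword arguments,
    # assuming the keyword is one of selector.fields, e.g.
    #   scan.set_selection(scans=[0, 3])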
365    def stats(self, stat='stddev', mask=None):
366        """
367        Determine the specified statistic of the current beam/if/pol
368        Takes a 'mask' as an optional parameter to specify which
369        channels should be excluded.
370        Parameters:
371            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
372                     'var', 'stddev', 'avdev', 'rms', 'median'
373            mask:    an optional mask specifying where the statistic
374                     should be determined.
375        Example:
376            scan.set_unit('channel')
377            msk = scan.create_mask([100, 200], [500, 600])
378            scan.stats(stat='mean', mask=msk)
379        """
380        mask = mask or []
381        if not self._check_ifs():
382            raise ValueError("Cannot apply mask as the IFs have different "
383                             "number of channels. Please use setselection() "
384                             "to select individual IFs")
385
386        statvals = self._math._stats(self, mask, stat)
387        def cb(i):
388            return statvals[i]
389
390        return self._row_callback(cb, stat)
391
392    def stddev(self, mask=None):
393        """
394        Determine the standard deviation of the current beam/if/pol
395        Takes a 'mask' as an optional parameter to specify which
396        channels should be excluded.
397        Parameters:
398            mask:    an optional mask specifying where the standard
399                     deviation should be determined.
400
401        Example:
402            scan.set_unit('channel')
403            msk = scan.create_mask([100, 200], [500, 600])
404            scan.stddev(mask=msk)
405        """
406        return self.stats(stat='stddev', mask=mask)
407
408
409    def get_column_names(self):
410        """
411        Return a list of column names, which can be used for selection.
412        """
413        return list(Scantable.get_column_names(self))
414
415    def get_tsys(self):
416        """
417        Return the System temperatures.
418        Returns:
419            a list of Tsys values for the current selection
420        """
421
422        return self._row_callback(self._gettsys, "Tsys")
423
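    # Note: _row_callback applies 'callback' to every row, builds a per-row
    # report (scan, time, beam/IF/pol and the value), prints it when
    # rcParams['verbose'] is set, and returns the list of callback values.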
424    def _row_callback(self, callback, label):
425        out = ""
426        outvec = []
427        sep = '-'*50
428        for i in range(self.nrow()):
429            tm = self._gettime(i)
430            src = self._getsourcename(i)
431            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
432            out += 'Time[%s]:\n' % (tm)
433            if self.nbeam(-1) > 1:
434                out +=  ' Beam[%d] ' % (self.getbeam(i))
435            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
436            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
437            outvec.append(callback(i))
438            out += '= %3.3f\n' % (outvec[i])
439            out +=  sep+'\n'
440        if rcParams['verbose']:
441            print sep
442            print " %s" % (label)
443            print sep
444            print out
445        return outvec
446
447    def _get_column(self, callback, row=-1):
448        """
449        """
450        if row == -1:
451            return [callback(i) for i in range(self.nrow())]
452        else:
453            if  0 <= row < self.nrow():
454                return callback(row)
455
456
457    def get_time(self, row=-1, asdatetime=False):
458        """
459        Get a list of time stamps for the observations.
460        Return a datetime object for each integration time stamp in the scantable.
461        Parameters:
462            row:          row no of integration. Default -1 returns all rows
463            asdatetime:   return values as datetime objects rather than strings
464        Example:
465            none
466        """
467        from time import strptime
468        from datetime import datetime
469        times = self._get_column(self._gettime, row)
470        if not asdatetime:
471            return times
472        format = "%Y/%m/%d/%H:%M:%S"
473        if isinstance(times, list):
474            return [datetime(*strptime(i, format)[:6]) for i in times]
475        else:
476            return datetime(*strptime(times, format)[:6])
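    # Illustrative usage:
    #   stamps = scan.get_time(asdatetime=True)   # datetime objects, all rows
    #   t0 = scan.get_time(0)                     # time string for row 0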
477
478
479    def get_inttime(self, row=-1):
480        """
481        Get a list of integration times for the observations.
482        Return a time in seconds for each integration in the scantable.
483        Parameters:
484            row:    row no of integration. Default -1 returns all rows.
485        Example:
486            none
487        """
488        return self._get_column(self._getinttime, row)
489
490
491    def get_sourcename(self, row=-1):
492        """
493        Get a list of source names for the observations.
494        Return a string for each integration in the scantable.
495        Parameters:
496            row:    row no of integration. Default -1 returns all rows.
497        Example:
498            none
499        """
500        return self._get_column(self._getsourcename, row)
501
502    def get_elevation(self, row=-1):
503        """
504        Get a list of elevations for the observations.
505        Return a float for each integration in the scantable.
506        Parameters:
507            row:    row no of integration. Default -1 returns all rows.
508        Example:
509            none
510        """
511        return self._get_column(self._getelevation, row)
512
513    def get_azimuth(self, row=-1):
514        """
515        Get a list of azimuths for the observations.
516        Return a float for each integration in the scantable.
517        Parameters:
518            row:    row no of integration. Default -1 returns all rows.
519        Example:
520            none
521        """
522        return self._get_column(self._getazimuth, row)
523
524    def get_parangle(self, row=-1):
525        """
526        Get a list of parallactic angles for the observations.
527        Return a float for each integration in the scantable.
528        Parameters:
529            row:    row no of integration. Default -1 returns all rows.
530        Example:
531            none
532        """
533        return self._get_column(self._getparangle, row)
534
535    def get_direction(self, row=-1):
536        """
537        Get a list of Positions on the sky (direction) for the observations.
538        Return a string for each integration in the scantable.
539        Parameters:
540            row:    row no of integration. Default -1 returns all rows
541        Example:
542            none
543        """
544        return self._get_column(self._getdirection, row)
545
546    def get_directionval(self, row=-1):
547        """
548        Get a list of Positions on the sky (direction) for the observations.
549        Return a float for each integration in the scantable.
550        Parameters:
551            row:    row no of integration. Default -1 returns all rows
552        Example:
553            none
554        """
555        return self._get_column(self._getdirectionvec, row)
556
557
558    def set_unit(self, unit='channel'):
559        """
560        Set the unit for all following operations on this scantable
561        Parameters:
562            unit:    optional unit, default is 'channel'
563                     one of '*Hz', 'km/s', 'channel', ''
564        """
565        varlist = vars()
566        if unit in ['', 'pixel', 'channel']:
567            unit = ''
568        inf = list(self._getcoordinfo())
569        inf[0] = unit
570        self._setcoordinfo(inf)
571        self._add_history("set_unit", varlist)
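    # Illustrative usage:
    #   scan.set_unit('km/s')   # switch the abcissa to velocity
    #   scan.set_unit('GHz')    # or to frequency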
572
573    @print_log_dec
574    def set_instrument(self, instr):
575        """
576        Set the instrument for subsequent processing.
577        Parameters:
578            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
579                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
580        """
581        self._setInstrument(instr)
582        self._add_history("set_instrument", vars())
583
584    @print_log_dec
585    def set_feedtype(self, feedtype):
586        """
587        Overwrite the feed type, which might not be set correctly.
588        Parameters:
589            feedtype:     'linear' or 'circular'
590        """
591        self._setfeedtype(feedtype)
592        self._add_history("set_feedtype", vars())
593
594    @print_log_dec
595    def set_doppler(self, doppler='RADIO'):
596        """
597        Set the doppler for all following operations on this scantable.
598        Parameters:
599            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
600        """
601        varlist = vars()
602        inf = list(self._getcoordinfo())
603        inf[2] = doppler
604        self._setcoordinfo(inf)
605        self._add_history("set_doppler", vars())
606
607    @print_log_dec
608    def set_freqframe(self, frame=None):
609        """
610        Set the frame type of the Spectral Axis.
611        Parameters:
612            frame:   an optional frame type, default 'LSRK'. Valid frames are:
613                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
614                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
615        Examples:
616            scan.set_freqframe('BARY')
617        """
618        frame = frame or rcParams['scantable.freqframe']
619        varlist = vars()
620        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
621                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
622
623        if frame in valid:
624            inf = list(self._getcoordinfo())
625            inf[1] = frame
626            self._setcoordinfo(inf)
627            self._add_history("set_freqframe", varlist)
628        else:
629            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
630            if rcParams['verbose']:
631                print msg
632            else:
633                raise TypeError(msg)
634
635    def set_dirframe(self, frame=""):
636        """
637        Set the frame type of the Direction on the sky.
638        Parameters:
639            frame:   an optional frame type, default ''. Valid frames are:
640                     'J2000', 'B1950', 'GALACTIC'
641        Examples:
642            scan.set_dirframe('GALACTIC')
643        """
644        varlist = vars()
645        try:
646            Scantable.set_dirframe(self, frame)
647        except RuntimeError, msg:
648            if rcParams['verbose']:
649                print msg
650            else:
651                raise
652        self._add_history("set_dirframe", varlist)
653
654    def get_unit(self):
655        """
656        Get the default unit set in this scantable
657        Returns:
658            A unit string
659        """
660        inf = self._getcoordinfo()
661        unit = inf[0]
662        if unit == '': unit = 'channel'
663        return unit
664
665    def get_abcissa(self, rowno=0):
666        """
667        Get the abcissa in the current coordinate setup for the currently
668        selected Beam/IF/Pol
669        Parameters:
670            rowno:    an optional row number in the scantable. Default is the
671                      first row, i.e. rowno=0
672        Returns:
673            The abcissa values and the format string (as a dictionary)
674        """
675        abc = self._getabcissa(rowno)
676        lbl = self._getabcissalabel(rowno)
677        return abc, lbl
678
679    def flag(self, mask=None):
680        """
681        Flag the selected data using an optional channel mask.
682        Parameters:
683            mask:   an optional channel mask, created with create_mask. Default
684                    (no mask) is all channels.
685        """
686        varlist = vars()
687        mask = mask or []
688        try:
689            self._flag(mask)
690        except RuntimeError, msg:
691            if rcParams['verbose']:
692                print msg
693                return
694            else: raise
695        self._add_history("flag", varlist)
696
697    @print_log_dec
698    def lag_flag(self, start, end, unit="MHz", insitu=None):
699        """
700        Flag the data in 'lag' space by providing a frequency to remove.
701        Flagged data in the scantable gets interpolated over the region.
702        No taper is applied.
703        Parameters:
704            start:    the start frequency (really a period within the
705                      bandwidth)  or period to remove
706            end:      the end frequency or period to remove
707            unit:     the frequency unit (default "MHz") or "" for
708                      explicit lag channels
709        Notes:
710            It is recommended to flag edges of the band or strong
711            signals beforehand.
712        """
713        if insitu is None: insitu = rcParams['insitu']
714        self._math._setinsitu(insitu)
715        varlist = vars()
716        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
717        if not (unit == "" or base.has_key(unit)):
718            raise ValueError("%s is not a valid unit." % unit)
719        try:
720            if unit == "":
721                s = scantable(self._math._lag_flag(self, start, end, "lags"))
722            else:
723                s = scantable(self._math._lag_flag(self, start*base[unit],
724                                                   end*base[unit], "frequency"))
725        except RuntimeError, msg:
726            if rcParams['verbose']:
727                print msg
728                return
729            else: raise
730        s._add_history("lag_flag", varlist)
731        if insitu:
732            self._assign(s)
733        else:
734            return s
735
736    @print_log_dec
737    def create_mask(self, *args, **kwargs):
738        """
739        Compute and return a mask based on [min, max] windows.
740        The specified windows are to be INCLUDED, when the mask is
741        applied.
742        Parameters:
743            [min, max], [min2, max2], ...
744                Pairs of start/end points (inclusive) specifying the regions
745                to be masked
746            invert:     optional argument. If specified as True,
747                        return an inverted mask, i.e. the regions
748                        specified are EXCLUDED
749            row:        create the mask using the specified row for
750                        unit conversions, default is row=0
751                        only necessary if frequency varies over rows.
752        Example:
753            scan.set_unit('channel')
754            a)
755            msk = scan.create_mask([400, 500], [800, 900])
756            # masks everything outside 400 and 500
757            # and 800 and 900 in the unit 'channel'
758
759            b)
760            msk = scan.create_mask([400, 500], [800, 900], invert=True)
761            # masks the regions between 400 and 500
762            # and 800 and 900 in the unit 'channel'
763            c)
764            # mask only channel 400
765            msk = scan.create_mask([400])
766        """
767        row = kwargs.get("row", 0)
768        data = self._getabcissa(row)
769        u = self._getcoordinfo()[0]
770        if rcParams['verbose']:
771            if u == "": u = "channel"
772            msg = "The current mask window unit is %s" % u
773            i = self._check_ifs()
774            if not i:
775                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
776            asaplog.push(msg)
777        n = self.nchan()
778        msk = _n_bools(n, False)
779        # test if args is a 'list' or a 'normal *args - UGLY!!!
780
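        # Windows may be given either as separate arguments,
        # create_mask([a, b], [c, d]), or as a single list of windows,
        # create_mask([[a, b], [c, d]]); if the innermost element is a number
        # the former form was used, otherwise unwrap args[0].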
781        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
782             and args or args[0]
783        for window in ws:
784            if len(window) == 1:
785                window = [window[0], window[0]]
786            if len(window) == 0 or  len(window) > 2:
787                raise ValueError("A window needs to be defined as [start(, end)]")
788            if window[0] > window[1]:
789                tmp = window[0]
790                window[0] = window[1]
791                window[1] = tmp
792            for i in range(n):
793                if data[i] >= window[0] and data[i] <= window[1]:
794                    msk[i] = True
795        if kwargs.has_key('invert'):
796            if kwargs.get('invert'):
797                msk = mask_not(msk)
798        return msk
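    # Illustrative usage: masks can be combined with the helpers imported at
    # the top of this module, e.g. (channel ranges are arbitrary examples)
    #   m = mask_and(scan.create_mask([100, 200]), scan.create_mask([150, 300]))
    #   m = mask_not(m)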
799
800    def get_restfreqs(self):
801        """
802        Get the restfrequency(s) stored in this scantable.
803        The return value(s) are always of unit 'Hz'
804        Parameters:
805            none
806        Returns:
807            a list of doubles
808        """
809        return list(self._getrestfreqs())
810
811
812    def set_restfreqs(self, freqs=None, unit='Hz'):
813        """
814        Set or replace the restfrequency specified.
815        If the 'freqs' argument holds a scalar,
816        then that rest frequency will be applied to all the selected
817        data.  If the 'freqs' argument holds
818        a vector, then it MUST be of equal or smaller length than
819        the number of IFs (and the available restfrequencies will be
820        replaced by this vector).  In this case, *all* data have
821        the restfrequency set per IF according
822        to the corresponding value you give in the 'freqs' vector.
823        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
824        IF 1 gets restfreq 2e9.
825        You can also specify the frequencies via a linecatalog.
826
827        Parameters:
828            freqs:   list of rest frequency values or string identifiers
829            unit:    unit for rest frequency (default 'Hz')
830
831        Example:
832            # set the given restfrequency for the whole table
833            scan.set_restfreqs(freqs=1.4e9)
834            # If the number of IFs in the data is >= 2, IF0 gets the first
835            # value IF1 the second...
836            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
837            #set the given restfrequency for the whole table (by name)
838            scan.set_restfreqs(freqs="OH1667")
839
840        Note:
841            To do more sophisticated restfrequency setting, e.g. on a
842            source and IF basis, use scantable.set_selection() before using
843            this function.
844            # provided your scantable is called scan
845            selection = selector()
846            selection.set_name("ORION*")
847            selection.set_ifs([1])
848            scan.set_selection(selection)
849            scan.set_restfreqs(freqs=86.6e9)
850
851        """
852        varlist = vars()
853        from asap import linecatalog
854        # simple  value
855        if isinstance(freqs, int) or isinstance(freqs, float):
856            self._setrestfreqs(freqs, "",unit)
857        # list of values
858        elif isinstance(freqs, list) or isinstance(freqs, tuple):
859            # list values are scalars
860            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
861                sel = selector()
862                savesel = self._getselection()
863                iflist = self.getifnos()
864                for i in xrange(len(freqs)):
865                    sel.set_ifs(iflist[i])
866                    self._setselection(sel)
867                    self._setrestfreqs(freqs[i], "",unit)
868                self._setselection(savesel)
869            # list values are dicts with 'value' and 'name' keys
870            elif isinstance(freqs[-1], dict):
871                sel = selector()
872                savesel = self._getselection()
873                iflist = self.getifnos()
874                for i in xrange(len(freqs)):
875                    sel.set_ifs(iflist[i])
876                    self._setselection(sel)
877                    self._setrestfreqs(freqs[i]["value"],
878                                       freqs[i]["name"], "MHz")
879                self._setselection(savesel)
880        # freqs are to be taken from a linecatalog
881        elif isinstance(freqs, linecatalog):
882            sel = selector()
883            savesel = self._getselection()
884            iflist = self.getifnos()
885            for i in xrange(freqs.nrow()):
886                sel.set_ifs(iflist[i])
887                self._setselection(sel)
888                self._setrestfreqs(freqs.get_frequency(i),
889                                   freqs.get_name(i), "MHz")
890                # ensure that we are not iterating past nIF
891                if i == self.nif()-1: break
892            self._setselection(savesel)
893        else:
894            return
895        self._add_history("set_restfreqs", varlist)
896
897    def shift_refpix(self, delta):
898        """
899        Shift the reference pixel of the Spectral Coordinate by an
900        integer amount.
901        Parameters:
902            delta:   the amount to shift by
903        Note:
904            Be careful using this with broadband data.
905        """
906        Scantable.shift(self, delta)
907
908    def history(self, filename=None):
909        """
910        Print the history. Optionally to a file.
911        Parameters:
912            filename:    The name  of the file to save the history to.
913        """
914        hist = list(self._gethistory())
915        out = "-"*80
916        for h in hist:
917            if h.startswith("---"):
918                out += "\n"+h
919            else:
920                items = h.split("##")
921                date = items[0]
922                func = items[1]
923                items = items[2:]
924                out += "\n"+date+"\n"
925                out += "Function: %s\n  Parameters:" % (func)
926                for i in items:
927                    s = i.split("=")
928                    out += "\n   %s = %s" % (s[0], s[1])
929                out += "\n"+"-"*80
930        if filename is not None:
931            if filename == "":
932                filename = 'scantable_history.txt'
933            import os
934            filename = os.path.expandvars(os.path.expanduser(filename))
935            if not os.path.isdir(filename):
936                data = open(filename, 'w')
937                data.write(out)
938                data.close()
939            else:
940                msg = "Illegal file name '%s'." % (filename)
941                if rcParams['verbose']:
942                    print msg
943                else:
944                    raise IOError(msg)
945        if rcParams['verbose']:
946            try:
947                from IPython.genutils import page as pager
948            except ImportError:
949                from pydoc import pager
950            pager(out)
951        else:
952            return out
953        return
954    #
955    # Maths business
956    #
957    @print_log_dec
958    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
959        """
960        Return the (time) weighted average of a scan.
961        Note:
962            in channels only - align if necessary
963        Parameters:
964            mask:     an optional mask (only used for 'var' and 'tsys'
965                      weighting)
966            scanav:   True averages each scan separately
967                      False (default) averages all scans together,
968            weight:   Weighting scheme.
969                      'none'     (mean, no weight)
970                      'var'      (1/var(spec) weighted)
971                      'tsys'     (1/Tsys**2 weighted)
972                      'tint'     (integration time weighted)
973                      'tintsys'  (Tint/Tsys**2)
974                      'median'   (median averaging)
975                      The default is 'tint'
976            align:    align the spectra in velocity before averaging. It takes
977                      the time of the first spectrum as reference time.
978        Example:
979            # time average the scantable without using a mask
980            newscan = scan.average_time()
981        """
982        varlist = vars()
983        weight = weight or 'TINT'
984        mask = mask or ()
985        scanav = (scanav and 'SCAN') or 'NONE'
986        scan = (self, )
987        try:
988            if align:
989                scan = (self.freq_align(insitu=False), )
990            s = None
991            if weight.upper() == 'MEDIAN':
992                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
993                                                         scanav))
994            else:
995                s = scantable(self._math._average(scan, mask, weight.upper(),
996                              scanav))
997        except RuntimeError, msg:
998            if rcParams['verbose']:
999                print msg
1000                return
1001            else: raise
1002        s._add_history("average_time", varlist)
1003        return s
1004
1005    @print_log_dec
1006    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1007        """
1008        Return a scan where all spectra are converted to either
1009        Jansky or Kelvin depending upon the flux units of the scan table.
1010        By default the function tries to look the values up internally.
1011        If it can't find them (or if you want to over-ride), you must
1012        specify EITHER jyperk OR eta (and D which it will try to look up
1013        also if you don't set it). jyperk takes precedence if you set both.
1014        Parameters:
1015            jyperk:      the Jy / K conversion factor
1016            eta:         the aperture efficiency
1017            d:           the geometric diameter (metres)
1018            insitu:      if False a new scantable is returned.
1019                         Otherwise, the scaling is done in-situ
1020                         The default is taken from .asaprc (False)
1021        """
1022        if insitu is None: insitu = rcParams['insitu']
1023        self._math._setinsitu(insitu)
1024        varlist = vars()
1025        jyperk = jyperk or -1.0
1026        d = d or -1.0
1027        eta = eta or -1.0
1028        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1029        s._add_history("convert_flux", varlist)
1030        if insitu: self._assign(s)
1031        else: return s
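    # Illustrative usage (factor, efficiency and diameter values are arbitrary):
    #   scanjy = scan.convert_flux(jyperk=12.7, insitu=False)
    #   scank  = scan.convert_flux(eta=0.6, d=22.0, insitu=False)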
1032
1033    @print_log_dec
1034    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1035        """
1036        Return a scan after applying a gain-elevation correction.
1037        The correction can be made via either a polynomial or a
1038        table-based interpolation (and extrapolation if necessary).
1039        You specify polynomial coefficients, an ascii table or neither.
1040        If you specify neither, then a polynomial correction will be made
1041        with built in coefficients known for certain telescopes (an error
1042        will occur if the instrument is not known).
1043        The data and Tsys are *divided* by the scaling factors.
1044        Parameters:
1045            poly:        Polynomial coefficients (default None) to compute a
1046                         gain-elevation correction as a function of
1047                         elevation (in degrees).
1048            filename:    The name of an ascii file holding correction factors.
1049                         The first row of the ascii file must give the column
1050                         names and these MUST include columns
1051                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1052                         by this) somewhere.
1053                         The second row must give the data type of the
1054                         column. Use 'R' for Real and 'I' for Integer.
1055                         An example file would be
1056                         (actual factors are arbitrary) :
1057
1058                         TIME ELEVATION FACTOR
1059                         R R R
1060                         0.1 0 0.8
1061                         0.2 20 0.85
1062                         0.3 40 0.9
1063                         0.4 60 0.85
1064                         0.5 80 0.8
1065                         0.6 90 0.75
1066            method:      Interpolation method when correcting from a table.
1067                         Values are  "nearest", "linear" (default), "cubic"
1068                         and "spline"
1069            insitu:      if False a new scantable is returned.
1070                         Otherwise, the scaling is done in-situ
1071                         The default is taken from .asaprc (False)
1072        """
1073
1074        if insitu is None: insitu = rcParams['insitu']
1075        self._math._setinsitu(insitu)
1076        varlist = vars()
1077        poly = poly or ()
1078        from os.path import expandvars
1079        filename = expandvars(filename)
1080        s = scantable(self._math._gainel(self, poly, filename, method))
1081        s._add_history("gain_el", varlist)
1082        if insitu:
1083            self._assign(s)
1084        else:
1085            return s
1086
1087    @print_log_dec
1088    def freq_align(self, reftime=None, method='cubic', insitu=None):
1089        """
1090        Return a scan where all rows have been aligned in frequency/velocity.
1091        The alignment frequency frame (e.g. LSRK) is that set by function
1092        set_freqframe.
1093        Parameters:
1094            reftime:     reference time to align at. By default, the time of
1095                         the first row of data is used.
1096            method:      Interpolation method for regridding the spectra.
1097                         Choose from "nearest", "linear", "cubic" (default)
1098                         and "spline"
1099            insitu:      if False a new scantable is returned.
1100                         Otherwise, the scaling is done in-situ
1101                         The default is taken from .asaprc (False)
1102        """
1103        if insitu is None: insitu = rcParams["insitu"]
1104        self._math._setinsitu(insitu)
1105        varlist = vars()
1106        reftime = reftime or ""
1107        s = scantable(self._math._freq_align(self, reftime, method))
1108        s._add_history("freq_align", varlist)
1109        if insitu: self._assign(s)
1110        else: return s
1111
1112    @print_log_dec
1113    def opacity(self, tau, insitu=None):
1114        """
1115        Apply an opacity correction. The data
1116        and Tsys are multiplied by the correction factor.
1117        Parameters:
1118            tau:         (list of) opacity from which the correction factor is
1119                         exp(tau*ZD)
1120                         where ZD is the zenith-distance.
1121                         If a list is provided, it has to be of length nIF,
1122                         nIF*nPol or 1 and in order of IF/POL, e.g.
1123                         [opif0pol0, opif0pol1, opif1pol0 ...]
1124            insitu:      if False a new scantable is returned.
1125                         Otherwise, the scaling is done in-situ
1126                         The default is taken from .asaprc (False)
1127        """
1128        if insitu is None: insitu = rcParams['insitu']
1129        self._math._setinsitu(insitu)
1130        varlist = vars()
1131        if not hasattr(tau, "__len__"):
1132            tau = [tau]
1133        s = scantable(self._math._opacity(self, tau))
1134        s._add_history("opacity", varlist)
1135        if insitu: self._assign(s)
1136        else: return s
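    # Illustrative usage, reflecting the list interface noted in the changeset
    # message above (opacity values are arbitrary examples):
    #   scan.opacity(0.08)            # one opacity applied to all data
    #   scan.opacity([0.08, 0.09])    # per-IF opacities, in IF/POL order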
1137
1138    @print_log_dec
1139    def bin(self, width=5, insitu=None):
1140        """
1141        Return a scan where all spectra have been binned up.
1142        Parameters:
1143            width:       The bin width (default=5) in pixels
1144            insitu:      if False a new scantable is returned.
1145                         Otherwise, the scaling is done in-situ
1146                         The default is taken from .asaprc (False)
1147        """
1148        if insitu is None: insitu = rcParams['insitu']
1149        self._math._setinsitu(insitu)
1150        varlist = vars()
1151        s = scantable(self._math._bin(self, width))
1152        s._add_history("bin", varlist)
1153        if insitu:
1154            self._assign(s)
1155        else:
1156            return s
1157
1158    @print_log_dec
1159    def resample(self, width=5, method='cubic', insitu=None):
1160        """
1161        Return a scan where all spectra have been resampled (binned up by interpolation).
1162
1163        Parameters:
1164            width:       The bin width (default=5) in pixels
1165            method:      Interpolation method when correcting from a table.
1166                         Values are  "nearest", "linear", "cubic" (default)
1167                         and "spline"
1168            insitu:      if False a new scantable is returned.
1169                         Otherwise, the scaling is done in-situ
1170                         The default is taken from .asaprc (False)
1171        """
1172        if insitu is None: insitu = rcParams['insitu']
1173        self._math._setinsitu(insitu)
1174        varlist = vars()
1175        s = scantable(self._math._resample(self, method, width))
1176        s._add_history("resample", varlist)
1177        if insitu: self._assign(s)
1178        else: return s
1179
1180    @print_log_dec
1181    def average_pol(self, mask=None, weight='none'):
1182        """
1183        Average the Polarisations together.
1184        Parameters:
1185            mask:        An optional mask defining the region, where the
1186                         averaging will be applied. The output will have all
1187                         specified points masked.
1188            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1189                         weighted), or 'tsys' (1/Tsys**2 weighted)
1190        """
1191        varlist = vars()
1192        mask = mask or ()
1193        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1194        s._add_history("average_pol", varlist)
1195        return s
1196
1197    @print_log_dec
1198    def average_beam(self, mask=None, weight='none'):
1199        """
1200        Average the Beams together.
1201        Parameters:
1202            mask:        An optional mask defining the region, where the
1203                         averaging will be applied. The output will have all
1204                         specified points masked.
1205            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1206                         weighted), or 'tsys' (1/Tsys**2 weighted)
1207        """
1208        varlist = vars()
1209        mask = mask or ()
1210        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1211        s._add_history("average_beam", varlist)
1212        return s
1213
1214    def parallactify(self, pflag):
1215        """
1216        Set a flag to indicate whether this data should be treated as having
1217        been 'parallactified' (total phase == 0.0)
1218        Parameters:
1219            pflag:  Bool indicating whether to turn this on (True) or
1220                    off (False)
1221        """
1222        varlist = vars()
1223        self._parallactify(pflag)
1224        self._add_history("parallactify", varlist)
1225
1226    @print_log_dec
1227    def convert_pol(self, poltype=None):
1228        """
1229        Convert the data to a different polarisation type.
1230        Note that you will need cross-polarisation terms for most conversions.
1231        Parameters:
1232            poltype:    The new polarisation type. Valid types are:
1233                        "linear", "circular", "stokes" and "linpol"
1234        """
1235        varlist = vars()
1236        try:
1237            s = scantable(self._math._convertpol(self, poltype))
1238        except RuntimeError, msg:
1239            if rcParams['verbose']:
1240                print msg
1241                return
1242            else:
1243                raise
1244        s._add_history("convert_pol", varlist)
1245        return s
1246
1247    @print_log_dec
1248    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1249        """
1250        Smooth the spectrum by the specified kernel (conserving flux).
1251        Parameters:
1252            kernel:     The type of smoothing kernel. Select from
1253                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1254                        or 'poly'
1255            width:      The width of the kernel in pixels. For hanning this is
1256                        ignored, otherwise it defaults to 5 pixels.
1257                        For 'gaussian' it is the Full Width Half
1258                        Maximum. For 'boxcar' it is the full width.
1259                        For 'rmedian' and 'poly' it is the half width.
1260            order:      Optional parameter for 'poly' kernel (default is 2), to
1261                        specify the order of the polynomial. Ignored by all other
1262                        kernels.
1263            insitu:     if False a new scantable is returned.
1264                        Otherwise, the scaling is done in-situ
1265                        The default is taken from .asaprc (False)
1266        Example:
1267             none
1268        """
1269        if insitu is None: insitu = rcParams['insitu']
1270        self._math._setinsitu(insitu)
1271        varlist = vars()
1272        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1273        s._add_history("smooth", varlist)
1274        if insitu: self._assign(s)
1275        else: return s
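    # Illustrative usage (kernel and width are arbitrary examples):
    #   smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)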
1276
1277    @print_log_dec
1278    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1279                      insitu=None):
1280        """
1281        Return a scan which has been baselined (all rows) by a polynomial.
1282        Parameters:
1283            mask:       an optional mask
1284            order:      the order of the polynomial (default is 0)
1285            plot:       plot the fit and the residual. In this case each
1286                        individual fit has to be approved by typing 'y'
1287                        or 'n'
1288            uselin:     use linear polynomial fit
1289            insitu:     if False a new scantable is returned.
1290                        Otherwise, the scaling is done in-situ
1291                        The default is taken from .asaprc (False)
1292        Example:
1293            # return a scan baselined by a third order polynomial,
1294            # not using a mask
1295            bscan = scan.poly_baseline(order=3)
1296        """
1297        if insitu is None: insitu = rcParams['insitu']
1298        varlist = vars()
1299        if mask is None:
1300            mask = [True for i in xrange(self.nchan(-1))]
1301        from asap.asapfitter import fitter
1302        try:
1303            f = fitter()
1304            f.set_scan(self, mask)
1305            #f.set_function(poly=order)
1306            if uselin:
1307                f.set_function(lpoly=order)
1308            else:
1309                f.set_function(poly=order)
1310            s = f.auto_fit(insitu, plot=plot)
1311            s._add_history("poly_baseline", varlist)
1312            if insitu: self._assign(s)
1313            else: return s
1314        except RuntimeError:
1315            msg = "The fit failed, possibly because it didn't converge."
1316            if rcParams['verbose']:
1317                print msg
1318                return
1319            else:
1320                raise RuntimeError(msg)
1321
1322    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1323                           threshold=3, chan_avg_limit=1, plot=False,
1324                           insitu=None):
1325        """
1326        Return a scan which has been baselined (all rows) by a polynomial.
1327        Spectral lines are detected first using linefinder and masked out
1328        to avoid them affecting the baseline solution.
1329
1330        Parameters:
1331            mask:       an optional mask retrieved from scantable
1332            edge:       an optional number of channels to drop at
1333                        the edge of spectrum. If only one value is
1334                        specified, the same number will be dropped from
1335                        both sides of the spectrum. Default is to keep
1336                        all channels. Nested tuples represent individual
1337                        edge selection for different IFs (the number of spectral
1338                        channels can differ between IFs)
1339            order:      the order of the polynomial (default is 0)
1340            threshold:  the threshold used by line finder. It is better to
1341                        keep it large as only strong lines affect the
1342                        baseline solution.
1343            chan_avg_limit:
1344                        a maximum number of consecutive spectral channels to
1345                        average during the search of weak and broad lines.
1346                        The default is no averaging (and no search for weak
1347                        lines). If such lines can affect the fitted baseline
1348                        (e.g. a high order polynomial is fitted), increase this
1349                        parameter (usually values up to 8 are reasonable). Most
1350                        users of this method should find the default value
1351                        sufficient.
1352            plot:       plot the fit and the residual. In this mode each
1353                        individual fit has to be approved by typing 'y'
1354                        or 'n'
1355            insitu:     if False a new scantable is returned.
1356                        Otherwise, the baselining is done in-situ
1357                        The default is taken from .asaprc (False)
1358
1359        Example:
1360            scan2=scan.auto_poly_baseline(order=7)
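            # an illustrative call using per-IF edges: drop 8 channels on
            # each side for IF0 and 16 for IF1 (values are examples only)
            scan3 = scan.auto_poly_baseline(order=3, edge=[(8, 8), (16, 16)])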
1361        """
1362        if insitu is None: insitu = rcParams['insitu']
1363        varlist = vars()
1364        from asap.asapfitter import fitter
1365        from asap.asaplinefind import linefinder
1366        from asap import _is_sequence_or_number as _is_valid
1367
1368        # check whether edge is set up for each IF individually
1369        individualedge = False
1370        if len(edge) > 1:
1371            if isinstance(edge[0], (list, tuple)):
1372                individualedge = True
1373
1374        if not _is_valid(edge, int) and not individualedge:
1375            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1376                              "pair of integers specified as a tuple. Nested tuples " \
1377                              "are allowed to make individual selection for different IFs."
1378
1379        curedge = (0, 0)
1380        if individualedge:
1381            for edgepar in edge:
1382                if not _is_valid(edgepar, int):
1383                    raise ValueError, "Each element of the 'edge' tuple has " \
1384                                      "to be a pair of integers or an integer."
1385        else:
1386            curedge = edge
1387
1388        # setup fitter
1389        f = fitter()
1390        f.set_function(poly=order)
1391
1392        # setup line finder
1393        fl = linefinder()
1394        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1395
1396        if not insitu:
1397            workscan = self.copy()
1398        else:
1399            workscan = self
1400
1401        fl.set_scan(workscan)
1402
1403        rows = range(workscan.nrow())
1404        asaplog.push("Processing:")
1405        for r in rows:
1406            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1407                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1408                 workscan.getpol(r), workscan.getcycle(r))
1409            asaplog.push(msg, False)
1410
1411            # figure out edge parameter
1412            if individualedge:
1413                if len(edge) <= workscan.getif(r):
1414                    raise RuntimeError, "Number of edge elements appears to " \
1415                                        "be less than the number of IFs"
1416                curedge = edge[workscan.getif(r)]
1417
1418            # setup line finder
1419            fl.find_lines(r, mask, curedge)
1420            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1421                        mask_and(workscan._getmask(r), fl.get_mask()))
1422            f.fit()
1423            x = f.get_parameters()
1424            if plot:
1425                f.plot(residual=True)
1426                x = raw_input("Accept fit ( [y]/n ): ")
1427                if x.upper() == 'N':
1428                    continue
1429            workscan._setspectrum(f.fitter.getresidual(), r)
1430        if plot:
1431            f._p.unmap()
1432            f._p = None
1433        workscan._add_history("auto_poly_baseline", varlist)
1434        if insitu:
1435            self._assign(workscan)
1436        else:
1437            return workscan
1438
1439    @print_log_dec
1440    def rotate_linpolphase(self, angle):
1441        """
1442        Rotate the phase of the complex polarization O=Q+iU correlation.
1443        This is always done in situ in the raw data.  So if you call this
1444        function more than once then each call rotates the phase further.
1445        Parameters:
1446            angle:   The angle (degrees) to rotate (add) by.
1447        Examples:
1448            scan.rotate_linpolphase(2.3)
1449        """
1450        varlist = vars()
1451        self._math._rotate_linpolphase(self, angle)
1452        self._add_history("rotate_linpolphase", varlist)
1453        return
1454
1455    @print_log_dec
1456    def rotate_xyphase(self, angle):
1457        """
1458        Rotate the phase of the XY correlation.  This is always done in situ
1459        in the data.  So if you call this function more than once
1460        then each call rotates the phase further.
1461        Parameters:
1462            angle:   The angle (degrees) to rotate (add) by.
1463        Examples:
1464            scan.rotate_xyphase(2.3)
1465        """
1466        varlist = vars()
1467        self._math._rotate_xyphase(self, angle)
1468        self._add_history("rotate_xyphase", varlist)
1469        return
1470
1471    @print_log_dec
1472    def swap_linears(self):
1473        """
1474        Swap the linear polarisations XX and YY, or more generally the first
1475        two polarisations, as this also works for circulars.
1476        """
1477        varlist = vars()
1478        self._math._swap_linears(self)
1479        self._add_history("swap_linears", varlist)
1480        return
1481
1482    @print_log_dec
1483    def invert_phase(self):
1484        """
1485        Invert the phase of the complex polarisation
1486        """
1487        varlist = vars()
1488        self._math._invert_phase(self)
1489        self._add_history("invert_phase", varlist)
1490        return
1491
1492    @print_log_dec
1493    def add(self, offset, insitu=None):
1494        """
1495        Return a scan where all spectra have the offset added
1496        Parameters:
1497            offset:      the offset
1498            insitu:      if False a new scantable is returned.
1499                         Otherwise, the addition is done in-situ
1500                         The default is taken from .asaprc (False)
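        Example:
            # e.g. add a constant offset of 1.0 to every spectrum
            sout = scan.add(1.0, insitu=False)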
1501        """
1502        if insitu is None: insitu = rcParams['insitu']
1503        self._math._setinsitu(insitu)
1504        varlist = vars()
1505        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1506        s._add_history("add", varlist)
1507        if insitu:
1508            self._assign(s)
1509        else:
1510            return s
1511
1512    @print_log_dec
1513    def scale(self, factor, tsys=True, insitu=None):
1514        """
1515        Return a scan where all spectra are scaled by the given 'factor'
1516        Parameters:
1517            factor:      the scaling factor
1518            insitu:      if False a new scantable is returned.
1519                         Otherwise, the scaling is done in-situ
1520                         The default is taken from .asaprc (False)
1521            tsys:        if True (default) then apply the operation to Tsys
1522                         as well as the data
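        Example:
            # e.g. scale all spectra (and Tsys) by a factor of 1.3
            sout = scan.scale(1.3, tsys=True, insitu=False)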
1523        """
1524        if insitu is None: insitu = rcParams['insitu']
1525        self._math._setinsitu(insitu)
1526        varlist = vars()
1527        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1528        s._add_history("scale", varlist)
1529        if insitu:
1530            self._assign(s)
1531        else:
1532            return s
1533
1534    def set_sourcetype(self, match, matchtype="pattern",
1535                       sourcetype="reference"):
1536        """
1537        Set the type of the source to be a source or reference scan
1538        using the provided pattern.
1539        Parameters:
1540            match:          a Unix style pattern, regular expression or selector
1541            matchtype:      'pattern' (default) UNIX style pattern or
1542                            'regex' regular expression
1543            sourcetype:     the type of the source to use (source/reference)
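        Example:
            # e.g. mark every scan whose source name ends in '_R' as a
            # reference scan (the pattern here is illustrative)
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")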
1544        """
1545        varlist = vars()
1546        basesel = self.get_selection()
1547        stype = -1
1548        if sourcetype.lower().startswith("r"):
1549            stype = 1
1550        elif sourcetype.lower().startswith("s"):
1551            stype = 0
1552        else:
1553            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1554        if matchtype.lower().startswith("p"):
1555            matchtype = "pattern"
1556        elif matchtype.lower().startswith("r"):
1557            matchtype = "regex"
1558        else:
1559            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1560        sel = selector()
1561        if isinstance(match, selector):
1562            sel = match
1563        else:
1564            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1565        self.set_selection(basesel+sel)
1566        self._setsourcetype(stype)
1567        self.set_selection(basesel)
1568        self._add_history("set_sourcetype", varlist)
1569
1570    @print_log_dec
1571    def auto_quotient(self, preserve=True, mode='paired'):
1572        """
1573        This function builds quotients automatically.
1574        It assumes the observation has the same number of
1575        "ons" and "offs".
1576        Parameters:
1577            preserve:       you can preserve (default) the continuum or
1578                            remove it.  The equations used are
1579                            preserve: Output = Toff * (on/off) - Toff
1580                            remove:   Output = Toff * (on/off) - Ton
1581            mode:           the on/off detection mode
1582                            'paired' (default)
1583                            identifies 'off' scans by the
1584                            trailing '_R' (Mopra/Parkes) or
1585                            '_e'/'_w' (Tid) and matches
1586                            on/off pairs from the observing pattern
1587                            'time'
1588                            finds the closest off in time
1589
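        Example:
            # e.g. form quotients by pairing each 'on' with the closest
            # 'off' in time
            q = scan.auto_quotient(mode='time', preserve=True)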
1590        """
1591        modes = ["time", "paired"]
1592        if not mode in modes:
1593            msg = "please provide valid mode. Valid modes are %s" % (modes)
1594            raise ValueError(msg)
1595        varlist = vars()
1596        s = None
1597        if mode.lower() == "paired":
1598            basesel = self.get_selection()
1599            sel = selector()+basesel
1600            sel.set_query("SRCTYPE==1")
1601            self.set_selection(sel)
1602            offs = self.copy()
1603            sel.set_query("SRCTYPE==0")
1604            self.set_selection(sel)
1605            ons = self.copy()
1606            s = scantable(self._math._quotient(ons, offs, preserve))
1607            self.set_selection(basesel)
1608        elif mode.lower() == "time":
1609            s = scantable(self._math._auto_quotient(self, mode, preserve))
1610        s._add_history("auto_quotient", varlist)
1611        return s
1612
1613    @print_log_dec
1614    def mx_quotient(self, mask = None, weight='median', preserve=True):
1615        """
1616        Form a quotient using "off" beams when observing in "MX" mode.
1617        Parameters:
1618            mask:           an optional mask to be used when weight == 'stddev'
1619            weight:         How to average the off beams.  Default is 'median'.
1620            preserve:       you can preserve (default) the continuum or
1621                            remove it.  The equations used are
1622                            preserve: Output = Toff * (on/off) - Toff
1623                            remove:   Output = Toff * (on/off) - Ton
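        Example:
            # e.g. median-average the off beams and keep the continuum
            q = scan.mx_quotient(weight='median', preserve=True)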
1624        """
1625        mask = mask or ()
1626        varlist = vars()
1627        on = scantable(self._math._mx_extract(self, 'on'))
1628        preoff = scantable(self._math._mx_extract(self, 'off'))
1629        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1630        from asapmath  import quotient
1631        q = quotient(on, off, preserve)
1632        q._add_history("mx_quotient", varlist)
1633        return q
1634
1635    @print_log_dec
1636    def freq_switch(self, insitu=None):
1637        """
1638        Apply frequency switching to the data.
1639        Parameters:
1640            insitu:      if False a new scantable is returned.
1641                         Otherwise, the switching is done in-situ
1642                         The default is taken from .asaprc (False)
1643        Example:
1644            none
1645        """
1646        if insitu is None: insitu = rcParams['insitu']
1647        self._math._setinsitu(insitu)
1648        varlist = vars()
1649        s = scantable(self._math._freqswitch(self))
1650        s._add_history("freq_switch", varlist)
1651        if insitu: self._assign(s)
1652        else: return s
1653
1654    @print_log_dec
1655    def recalc_azel(self):
1656        """
1657        Recalculate the azimuth and elevation for each position.
1658        Parameters:
1659            none
1660        Example:
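            scan.recalc_azel()   # recalculates and stores az/el in-situ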
1661        """
1662        varlist = vars()
1663        self._recalcazel()
1664        self._add_history("recalc_azel", varlist)
1665        return
1666
1667    @print_log_dec
1668    def __add__(self, other):
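        # scantable + scantable adds the spectra element-wise;
        # scantable + float adds a constant offset to every spectrum
        # (Tsys is left unchanged in the scalar case).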
1669        varlist = vars()
1670        s = None
1671        if isinstance(other, scantable):
1672            s = scantable(self._math._binaryop(self, other, "ADD"))
1673        elif isinstance(other, float):
1674            s = scantable(self._math._unaryop(self, other, "ADD", False))
1675        else:
1676            raise TypeError("Other input is not a scantable or float value")
1677        s._add_history("operator +", varlist)
1678        return s
1679
1680    @print_log_dec
1681    def __sub__(self, other):
1682        """
1683        implicit on all axes and on Tsys
1684        """
1685        varlist = vars()
1686        s = None
1687        if isinstance(other, scantable):
1688            s = scantable(self._math._binaryop(self, other, "SUB"))
1689        elif isinstance(other, float):
1690            s = scantable(self._math._unaryop(self, other, "SUB", False))
1691        else:
1692            raise TypeError("Other input is not a scantable or float value")
1693        s._add_history("operator -", varlist)
1694        return s
1695
1696    @print_log_dec
1697    def __mul__(self, other):
1698        """
1699        implicit on all axes and on Tsys
1700        """
1701        varlist = vars()
1702        s = None
1703        if isinstance(other, scantable):
1704            s = scantable(self._math._binaryop(self, other, "MUL"))
1705        elif isinstance(other, float):
1706            s = scantable(self._math._unaryop(self, other, "MUL", False))
1707        else:
1708            raise TypeError("Other input is not a scantable or float value")
1709        s._add_history("operator *", varlist)
1710        return s
1711
1712
1713    @print_log_dec
1714    def __div__(self, other):
1715        """
1716        implicit on all axes and on Tsys
1717        """
1718        varlist = vars()
1719        s = None
1720        if isinstance(other, scantable):
1721            s = scantable(self._math._binaryop(self, other, "DIV"))
1722        elif isinstance(other, float):
1723            if other == 0.0:
1724                raise ZeroDivisionError("Dividing by zero is not recommended")
1725            s = scantable(self._math._unaryop(self, other, "DIV", False))
1726        else:
1727            raise TypeError("Other input is not a scantable or float value")
1728        s._add_history("operator /", varlist)
1729        return s
1730
1731    def get_fit(self, row=0):
1732        """
1733        Print or return the stored fits for a row in the scantable
1734        Parameters:
1735            row:    the row to which the fit has been applied.
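        Example:
            # e.g. retrieve the fit stored for row 0; with
            # rcParams['verbose'] disabled this returns a dictionary
            fitinfo = scan.get_fit(0)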
1736        """
1737        if row >= self.nrow():
1738            return
1739        from asap.asapfit import asapfit
1740        fit = asapfit(self._getfit(row))
1741        if rcParams['verbose']:
1742            print fit
1743            return
1744        else:
1745            return fit.as_dict()
1746
1747    def flag_nans(self):
1748        """
1749        Utility function to flag NaN values in the scantable.
1750        """
1751        import numpy
1752        basesel = self.get_selection()
1753        for i in range(self.nrow()):
1754            sel = self.get_row_selector(i)
1755            self.set_selection(basesel+sel)
1756            nans = numpy.isnan(self._getspectrum(0))
1757            if numpy.any(nans):
1758                bnans = [bool(v) for v in nans]
1759                self.flag(bnans)
1760        self.set_selection(basesel)
1761
1762    def get_row_selector(self, rowno):
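        # Build a selector that matches exactly the beam, IF, polarisation,
        # scan and cycle of the given row.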
1763        return selector(beams=self.getbeam(rowno),
1764                        ifs=self.getif(rowno),
1765                        pols=self.getpol(rowno),
1766                        scans=self.getscan(rowno),
1767                        cycles=self.getcycle(rowno))
1768
1769    def _add_history(self, funcname, parameters):
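        # Store one history entry as a single '##'-separated string:
        # timestamp, function name, then key=value pairs (scantable
        # arguments and masks are abbreviated to keep the entry short).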
1770        if not rcParams['scantable.history']:
1771            return
1772        # create date
1773        sep = "##"
1774        from datetime import datetime
1775        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1776        hist = dstr+sep
1777        hist += funcname+sep
1778        if parameters.has_key('self'): del parameters['self']
1779        for k, v in parameters.iteritems():
1780            if type(v) is dict:
1781                for k2, v2 in v.iteritems():
1782                    hist += k2
1783                    hist += "="
1784                    if isinstance(v2, scantable):
1785                        hist += 'scantable'
1786                    elif k2 == 'mask':
1787                        if isinstance(v2, list) or isinstance(v2, tuple):
1788                            hist += str(self._zip_mask(v2))
1789                        else:
1790                            hist += str(v2)
1791                    else:
1792                        hist += str(v2)
1793            else:
1794                hist += k
1795                hist += "="
1796                if isinstance(v, scantable):
1797                    hist += 'scantable'
1798                elif k == 'mask':
1799                    if isinstance(v, list) or isinstance(v, tuple):
1800                        hist += str(self._zip_mask(v))
1801                    else:
1802                        hist += str(v)
1803                else:
1804                    hist += str(v)
1805            hist += sep
1806        hist = hist[:-2] # remove trailing '##'
1807        self._addhistory(hist)
1808
1809
1810    def _zip_mask(self, mask):
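        # Compress a boolean channel mask into a list of [start, end)
        # segments covering the True channels, e.g.
        # [False, True, True, False, True] -> [[1, 3], [4, 5]].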
1811        mask = list(mask)
1812        i = 0
1813        segments = []
1814        while mask[i:].count(1):
1815            i += mask[i:].index(1)
1816            if mask[i:].count(0):
1817                j = i + mask[i:].index(0)
1818            else:
1819                j = len(mask)
1820            segments.append([i, j])
1821            i = j
1822        return segments
1823
1824    def _get_ordinate_label(self):
1825        fu = "("+self.get_fluxunit()+")"
1826        import re
1827        lbl = "Intensity"
1828        if re.match(".K.", fu):
1829            lbl = "Brightness Temperature "+ fu
1830        elif re.match(".Jy.", fu):
1831            lbl = "Flux density "+ fu
1832        return lbl
1833
1834    def _check_ifs(self):
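        # Return True when all IFs that contain data have the same number
        # of channels (simple average-equals-first-element check).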
1835        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1836        nchans = filter(lambda t: t > 0, nchans)
1837        return (sum(nchans)/len(nchans) == nchans[0])
1838
1839    def _fill(self, names, unit, average):
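        # Import one or more files with the stfiller, optionally averaging
        # integrations within scans, and merge everything into this scantable.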
1840        import os
1841        from asap._asap import stfiller
1842        first = True
1843        fullnames = []
1844        for name in names:
1845            name = os.path.expandvars(name)
1846            name = os.path.expanduser(name)
1847            if not os.path.exists(name):
1848                msg = "File '%s' does not exist" % (name)
1849                if rcParams['verbose']:
1850                    asaplog.push(msg)
1851                    print asaplog.pop().strip()
1852                    return
1853                raise IOError(msg)
1854            fullnames.append(name)
1855        if average:
1856            asaplog.push('Auto averaging integrations')
1857        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1858        for name in fullnames:
1859            tbl = Scantable(stype)
1860            r = stfiller(tbl)
1861            rx = rcParams['scantable.reference']
1862            r._setreferenceexpr(rx)
1863            msg = "Importing %s..." % (name)
1864            asaplog.push(msg, False)
1865            print_log()
1866            r._open(name, -1, -1)
1867            r._read()
1868            if average:
1869                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1870            if not first:
1871                tbl = self._math._merge([self, tbl])
1872            Scantable.__init__(self, tbl)
1873            r._close()
1874            del r, tbl
1875            first = False
1876        if unit is not None:
1877            self.set_fluxunit(unit)
1878        self.set_freqframe(rcParams['scantable.freqframe'])
1879
1880    def __getitem__(self, key):
1881        if key < 0:
1882            key += self.nrow()
1883        if key >= self.nrow():
1884            raise IndexError("Row index out of range.")
1885        return self._getspectrum(key)
1886
1887    def __setitem__(self, key, value):
1888        if key < 0:
1889            key += self.nrow()
1890        if key >= self.nrow():
1891            raise IndexError("Row index out of range.")
1892        if not hasattr(value, "__len__") or \
1893                len(value) > self.nchan(self.getif(key)):
1894            raise ValueError("Spectrum length doesn't match.")
1895        return self._setspectrum(value, key)
1896
1897    def __len__(self):
1898        return self.nrow()
1899
1900    def __iter__(self):
1901        for i in range(len(self)):
1902            yield self[i]