source: trunk/python/scantable.py @ 1725

Last change on this file was r1725, checked in by Malte Marquarding, 14 years ago

Finishing touches to opacity calculations, docs, plotting and model

1import os
2try:
3    from functools import wraps as wraps_dec
4except ImportError:
5    from asap.compatibility import wraps as wraps_dec
6
7from asap._asap import Scantable
8from asap import rcParams
9from asap import print_log, print_log_dec
10from asap import asaplog
11from asap import selector
12from asap import linecatalog
13from asap.coordinate import coordinate
14from asap import _n_bools, mask_not, mask_and, mask_or
15
16
17def preserve_selection(func):
18    @wraps_dec(func)
19    def wrap(obj, *args, **kw):
20        basesel = obj.get_selection()
21        val = func(obj, *args, **kw)
22        obj.set_selection(basesel)
23        return val
24    return wrap
25
26
27def is_scantable(filename):
28    return (os.path.isdir(filename)
29            and not os.path.exists(filename+'/table.f1')
30            and os.path.exists(filename+'/table.info'))
31
32
33class scantable(Scantable):
34    """
35        The ASAP container for scans
36    """
37    @print_log_dec
38    def __init__(self, filename, average=None, unit=None, parallactify=None):
39        """
40        Create a scantable from a saved one or make a reference
41        Parameters:
42            filename:    the name of an asap table on disk
43                         or
44                         the name of a rpfits/sdfits/ms file
45                         (integrations within scans are auto averaged
46                         and the whole file is read)
47                         or
48                         [advanced] a reference to an existing
49                         scantable
50            average:     average all integrations within a scan on read.
51                         The default (True) is taken from .asaprc.
52            unit:         brightness unit; must be consistent with K or Jy.
53                         Overrides the default selected by the reader
54                         (input rpfits/sdfits/ms) or replaces the value
55                         in existing scantables
56            parallactify: Indicate that the data have been parallactified.
57                          Default is taken from the rc file.
58        """
59        if average is None:
60            average = rcParams['scantable.autoaverage']
61        parallactify = parallactify or rcParams['scantable.parallactify']
62        varlist = vars()
63        from asap._asap import stmath
64        self._math = stmath()
65        if isinstance(filename, Scantable):
66            Scantable.__init__(self, filename)
67        else:
68            if isinstance(filename, str):
69                filename = os.path.expandvars(filename)
70                filename = os.path.expanduser(filename)
71                if not os.path.exists(filename):
72                    s = "File '%s' not found." % (filename)
73                    if rcParams['verbose']:
74                        asaplog.push(s)
75                        return
76                    raise IOError(s)
77                if is_scantable(filename):
78                    ondisk = rcParams['scantable.storage'] == 'disk'
79                    Scantable.__init__(self, filename, ondisk)
80                    if unit is not None:
81                        self.set_fluxunit(unit)
82                    self.set_freqframe(rcParams['scantable.freqframe'])
83                else:
84                    self._fill([filename], unit, average)
85            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
86                  and isinstance(filename[-1], str):
87                self._fill(filename, unit, average)
88        self.parallactify(parallactify)
89        self._add_history("scantable", varlist)
90
91    @print_log_dec
92    def save(self, name=None, format=None, overwrite=False):
93        """
94        Store the scantable on disk. This can be an asap (aips++) Table,
95        SDFITS or MS2 format.
96        Parameters:
97            name:        the name of the output file. For format "ASCII"
98                         this is the root file name (data in 'name'.txt
99                         and header in 'name'_header.txt)
100            format:      an optional file format. Default is ASAP.
101                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
102                                       'SDFITS' (save as SDFITS file)
103                                       'ASCII' (saves as ascii text file)
104                                       'MS2' (saves as an aips++
105                                              MeasurementSet V2)
106                                       'FITS' (save as image FITS - not
107                                               readable by class)
108                                       'CLASS' (save as FITS readable by CLASS)
109            overwrite:   If the file should be overwritten if it exists.
110                         The default False is to return with warning
111                         without writing the output. USE WITH CARE.
112        Example:
113            scan.save('myscan.asap')
114            scan.save('myscan.sdfits', 'SDFITS')
115        """
116        from os import path
117        format = format or rcParams['scantable.save']
118        suffix = '.'+format.lower()
119        if name is None or name == "":
120            name = 'scantable'+suffix
121            msg = "No filename given. Using default name %s..." % name
122            asaplog.push(msg)
123        name = path.expandvars(name)
124        if path.isfile(name) or path.isdir(name):
125            if not overwrite:
126                msg = "File %s exists." % name
127                if rcParams['verbose']:
128                    print msg
129                    return
130                else:
131                    raise IOError(msg)
132        format2 = format.upper()
133        if format2 == 'ASAP':
134            self._save(name)
135        else:
136            from asap._asap import stwriter as stw
137            writer = stw(format2)
138            writer.write(self, name)
139        return
140
141    def copy(self):
142        """
143        Return a copy of this scantable.
144        Note:
145            This makes a full (deep) copy. scan2 = scan1 makes a reference.
146        Parameters:
147            none
148        Example:
149            copiedscan = scan.copy()
150        """
151        sd = scantable(Scantable._copy(self))
152        return sd
153
154    def drop_scan(self, scanid=None):
155        """
156        Return a new scantable where the specified scan number(s) have
157        been dropped.
158        Parameters:
159            scanid:    a (list of) scan number(s)
160        """
161        from asap import _is_sequence_or_number as _is_valid
162        from asap import _to_list
163        from asap import unique
164        if not _is_valid(scanid):
165            if rcParams['verbose']:
166                print "Please specify a scanno to drop from the scantable"
167                return
168            else:
169                raise RuntimeError("No scan given")
170        try:
171            scanid = _to_list(scanid)
172            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
173            for sid in scanid: allscans.remove(sid)
174            if len(allscans) == 0:
175                raise ValueError("Can't remove all scans")
176        except ValueError:
177            if rcParams['verbose']:
178                print "Couldn't find any match."
179                return
180            else: raise
181        try:
182            sel = selector(scans=allscans)
183            return self._select_copy(sel)
184        except RuntimeError:
185            if rcParams['verbose']:
186                print "Couldn't find any match."
187            else:
188                raise
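    # Illustrative sketch (assumes 'scan' is an existing scantable): return a new
    # scantable with scans 2 and 3 removed.
    #   trimmed = scan.drop_scan([2, 3])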
189
190    def _select_copy(self, selection):
191        orig = self.get_selection()
192        self.set_selection(orig+selection)
193        cp = self.copy()
194        self.set_selection(orig)
195        return cp
196
197    def get_scan(self, scanid=None):
198        """
199        Return a specific scan (by scanno) or collection of scans (by
200        source name) in a new scantable.
201        Note:
202            See scantable.drop_scan() for the inverse operation.
203        Parameters:
204            scanid:    a (list of) scanno or a source name, unix-style
205                       patterns are accepted for source name matching, e.g.
206                       '*_R' gets all 'ref' scans
207        Example:
208            # get all scans containing the source '323p459'
209            newscan = scan.get_scan('323p459')
210            # get all 'off' scans
211            refscans = scan.get_scan('*_R')
212            # get a subset of scans by scanno (as listed in scan.summary())
213            newscan = scan.get_scan([0, 2, 7, 10])
214        """
215        if scanid is None:
216            if rcParams['verbose']:
217                print "Please specify a scan no or name to " \
218                      "retrieve from the scantable"
219                return
220            else:
221                raise RuntimeError("No scan given")
222
223        try:
224            bsel = self.get_selection()
225            sel = selector()
226            if type(scanid) is str:
227                sel.set_name(scanid)
228                return self._select_copy(sel)
229            elif type(scanid) is int:
230                sel.set_scans([scanid])
231                return self._select_copy(sel)
232            elif type(scanid) is list:
233                sel.set_scans(scanid)
234                return self._select_copy(sel)
235            else:
236                msg = "Illegal scanid type, use 'int' or 'list' if ints."
237                if rcParams['verbose']:
238                    print msg
239                else:
240                    raise TypeError(msg)
241        except RuntimeError:
242            if rcParams['verbose']: print "Couldn't find any match."
243            else: raise
244
245    def __str__(self):
246        return Scantable._summary(self, True)
247
248    def summary(self, filename=None):
249        """
250        Print a summary of the contents of this scantable.
251        Parameters:
252            filename:    the name of a file to write the output to
253                         Default - no file output
254        """
255        info = Scantable._summary(self, True)
256        if filename is not None:
257            if filename == "":
258                filename = 'scantable_summary.txt'
259            from os.path import expandvars, isdir
260            filename = expandvars(filename)
261            if not isdir(filename):
262                data = open(filename, 'w')
263                data.write(info)
264                data.close()
265            else:
266                msg = "Illegal file name '%s'." % (filename)
267                if rcParams['verbose']:
268                    print msg
269                else:
270                    raise IOError(msg)
271        if rcParams['verbose']:
272            try:
273                from IPython.genutils import page as pager
274            except ImportError:
275                from pydoc import pager
276            pager(info)
277        else:
278            return info
279
280    def get_spectrum(self, rowno):
281        """Return the spectrum for the current row in the scantable as a list.
282        Parameters:
283             rowno:   the row number to retrieve the spectrum from
284        """
285        return self._getspectrum(rowno)
286
287    def get_mask(self, rowno):
288        """Return the mask for the current row in the scantable as a list.
289        Parameters:
290             rowno:   the row number to retrieve the mask from
291        """
292        return self._getmask(rowno)
293
294    def set_spectrum(self, spec, rowno):
295        """Return the spectrum for the current row in the scantable as a list.
296        Parameters:
297             spec:   the spectrum
298             rowno:    the row number to set the spectrum for
299        """
300        assert(len(spec) == self.nchan())
301        return self._setspectrum(spec, rowno)
302
303    def get_coordinate(self, rowno):
304        """Return the (spectral) coordinate for a a given 'rowno'.
305        NOTE:
306            * This coordinate is only valid until a scantable method modifies
307              the frequency axis.
308            * This coordinate contains the original frequency set-up,
309              NOT the new frame. The conversions, however, are done using the user
310              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
311              use scantable.freq_align first. Without it there is no closure,
312              i.e.
313              c = myscan.get_coordinate(0)
314              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
315
316        Parameters:
317             rowno:    the row number for the spectral coordinate
318
319        """
320        return coordinate(Scantable.get_coordinate(self, rowno))
321
322    def get_selection(self):
323        """
324        Get the selection object currently set on this scantable.
325        Parameters:
326            none
327        Example:
328            sel = scan.get_selection()
329            sel.set_ifs(0)              # select IF 0
330            scan.set_selection(sel)     # apply modified selection
331        """
332        return selector(self._getselection())
333
334    def set_selection(self, selection=None, **kw):
335        """
336        Select a subset of the data. All following operations on this scantable
337        are only applied to this selection.
338        Parameters:
339            selection:    a selector object (default unset the selection),
340
341            or
342
343            any combination of
344            "pols", "ifs", "beams", "scans", "cycles", "name", "query"
345
346        Examples:
347            sel = selector()         # create a selection object
348            sel.set_scans([0, 3])     # select SCANNO 0 and 3
349            scan.set_selection(sel)  # set the selection
350            scan.summary()           # will only print summary of scanno 0 and 3
351            scan.set_selection()     # unset the selection
352            # or the equivalent
353            scan.set_selection(scans=[0,3])
354            scan.summary()           # will only print summary of scanno 0 and 3
355            scan.set_selection()     # unset the selection
356        """
357        if selection is None:
358            # reset
359            if len(kw) == 0:
360                selection = selector()
361            else:
362                # try keywords
363                for k in kw:
364                    if k not in selector.fields:
365                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
366                selection = selector(**kw)
367        self._setselection(selection)
368
369    def stats(self, stat='stddev', mask=None):
370        """
371        Determine the specified statistic of the current beam/if/pol
372        Takes a 'mask' as an optional parameter to specify which
373        channels should be excluded.
374        Parameters:
375            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
376                     'var', 'stddev', 'avdev', 'rms', 'median'
377            mask:    an optional mask specifying where the statistic
378                     should be determined.
379        Example:
380            scan.set_unit('channel')
381            msk = scan.create_mask([100, 200], [500, 600])
382            scan.stats(stat='mean', mask=msk)
383        """
384        mask = mask or []
385        if not self._check_ifs():
386            raise ValueError("Cannot apply mask as the IFs have different "
387                             "number of channels. Please use setselection() "
388                             "to select individual IFs")
389
390        statvals = self._math._stats(self, mask, stat)
391        def cb(i):
392            return statvals[i]
393
394        return self._row_callback(cb, stat)
395
396    def stddev(self, mask=None):
397        """
398        Determine the standard deviation of the current beam/if/pol
399        Takes a 'mask' as an optional parameter to specify which
400        channels should be excluded.
401        Parameters:
402            mask:    an optional mask specifying where the standard
403                     deviation should be determined.
404
405        Example:
406            scan.set_unit('channel')
407            msk = scan.create_mask([100, 200], [500, 600])
408            scan.stddev(mask=msk)
409        """
410        return self.stats(stat='stddev', mask=mask)
411
412
413    def get_column_names(self):
414        """
415        Return a  list of column names, which can be used for selection.
416        """
417        return list(Scantable.get_column_names(self))
418
419    def get_tsys(self):
420        """
421        Return the System temperatures.
422        Returns:
423            a list of Tsys values for the current selection
424        """
425
426        return self._row_callback(self._gettsys, "Tsys")
427
428    def _row_callback(self, callback, label):
429        out = ""
430        outvec = []
431        sep = '-'*50
432        for i in range(self.nrow()):
433            tm = self._gettime(i)
434            src = self._getsourcename(i)
435            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
436            out += 'Time[%s]:\n' % (tm)
437            if self.nbeam(-1) > 1:
438                out +=  ' Beam[%d] ' % (self.getbeam(i))
439            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
440            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
441            outvec.append(callback(i))
442            out += '= %3.3f\n' % (outvec[i])
443            out +=  sep+'\n'
444        if rcParams['verbose']:
445            print sep
446            print " %s" % (label)
447            print sep
448            print out
449        return outvec
450
451    def _get_column(self, callback, row=-1):
452        """
453        """
454        if row == -1:
455            return [callback(i) for i in range(self.nrow())]
456        else:
457            if  0 <= row < self.nrow():
458                return callback(row)
459
460
461    def get_time(self, row=-1, asdatetime=False):
462        """
463        Get a list of time stamps for the observations.
464        Return a datetime object for each integration time stamp in the scantable.
465        Parameters:
466            row:          row no of integration. Default -1 return all rows
467            asdatetime:   return values as datetime objects rather than strings
468        Example:
469            none
470        """
471        from time import strptime
472        from datetime import datetime
473        times = self._get_column(self._gettime, row)
474        if not asdatetime:
475            return times
476        format = "%Y/%m/%d/%H:%M:%S"
477        if isinstance(times, list):
478            return [datetime(*strptime(i, format)[:6]) for i in times]
479        else:
480            return datetime(*strptime(times, format)[:6])
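    # Illustrative sketch (assumes 'scan' is an existing scantable): fetch the
    # time stamp of row 0 as a datetime object rather than a string.
    #   t0 = scan.get_time(row=0, asdatetime=True)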
481
482
483    def get_inttime(self, row=-1):
484        """
485        Get a list of integration times for the observations.
486        Return a time in seconds for each integration in the scantable.
487        Parameters:
488            row:    row no of integration. Default -1 return all rows.
489        Example:
490            none
491        """
492        return self._get_column(self._getinttime, row)
493
494
495    def get_sourcename(self, row=-1):
496        """
497        Get a list source names for the observations.
498        Return a string for each integration in the scantable.
499        Parameters:
500            row:    row no of integration. Default -1 return all rows.
501        Example:
502            none
503        """
504        return self._get_column(self._getsourcename, row)
505
506    def get_elevation(self, row=-1):
507        """
508        Get a list of elevations for the observations.
509        Return a float for each integration in the scantable.
510        Parameters:
511            row:    row no of integration. Default -1 return all rows.
512        Example:
513            none
514        """
515        return self._get_column(self._getelevation, row)
516
517    def get_azimuth(self, row=-1):
518        """
519        Get a list of azimuths for the observations.
520        Return a float for each integration in the scantable.
521        Parameters:
522            row:    row no of integration. Default -1 return all rows.
523        Example:
524            none
525        """
526        return self._get_column(self._getazimuth, row)
527
528    def get_parangle(self, row=-1):
529        """
530        Get a list of parallactic angles for the observations.
531        Return a float for each integration in the scantable.
532        Parameters:
533            row:    row no of integration. Default -1 return all rows.
534        Example:
535            none
536        """
537        return self._get_column(self._getparangle, row)
538
539    def get_direction(self, row=-1):
540        """
541        Get a list of Positions on the sky (direction) for the observations.
542        Return a string for each integration in the scantable.
543        Parameters:
544            row:    row no of integration. Default -1 return all rows
545        Example:
546            none
547        """
548        return self._get_column(self._getdirection, row)
549
550    def get_directionval(self, row=-1):
551        """
552        Get a list of Positions on the sky (direction) for the observations.
553        Return a float for each integration in the scantable.
554        Parameters:
555            row:    row no of integration. Default -1 return all rows
556        Example:
557            none
558        """
559        return self._get_column(self._getdirectionvec, row)
560
561
562    def set_unit(self, unit='channel'):
563        """
564        Set the unit for all following operations on this scantable
565        Parameters:
566            unit:    optional unit, default is 'channel'
567                     one of '*Hz', 'km/s', 'channel', ''
568        """
569        varlist = vars()
570        if unit in ['', 'pixel', 'channel']:
571            unit = ''
572        inf = list(self._getcoordinfo())
573        inf[0] = unit
574        self._setcoordinfo(inf)
575        self._add_history("set_unit", varlist)
576
577    @print_log_dec
578    def set_instrument(self, instr):
579        """
580        Set the instrument for subsequent processing.
581        Parameters:
582            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
583                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
584        """
585        self._setInstrument(instr)
586        self._add_history("set_instument", vars())
587
588    @print_log_dec
589    def set_feedtype(self, feedtype):
590        """
591        Overwrite the feed type, which might not be set correctly.
592        Parameters:
593            feedtype:     'linear' or 'circular'
594        """
595        self._setfeedtype(feedtype)
596        self._add_history("set_feedtype", vars())
597
598    @print_log_dec
599    def set_doppler(self, doppler='RADIO'):
600        """
601        Set the doppler for all following operations on this scantable.
602        Parameters:
603            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
604        """
605        varlist = vars()
606        inf = list(self._getcoordinfo())
607        inf[2] = doppler
608        self._setcoordinfo(inf)
609        self._add_history("set_doppler", vars())
610
611    @print_log_dec
612    def set_freqframe(self, frame=None):
613        """
614        Set the frame type of the Spectral Axis.
615        Parameters:
616            frame:   an optional frame type, default 'LSRK'. Valid frames are:
617                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
618                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
619        Examples:
620            scan.set_freqframe('BARY')
621        """
622        frame = frame or rcParams['scantable.freqframe']
623        varlist = vars()
624        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
625                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
626
627        if frame in valid:
628            inf = list(self._getcoordinfo())
629            inf[1] = frame
630            self._setcoordinfo(inf)
631            self._add_history("set_freqframe", varlist)
632        else:
633            msg  = "Please specify a valid freq type. Valid types are:\n", valid
634            if rcParams['verbose']:
635                print msg
636            else:
637                raise TypeError(msg)
638
639    def set_dirframe(self, frame=""):
640        """
641        Set the frame type of the Direction on the sky.
642        Parameters:
643            frame:   an optional frame type, default ''. Valid frames are:
644                     'J2000', 'B1950', 'GALACTIC'
645        Examples:
646            scan.set_dirframe('GALACTIC')
647        """
648        varlist = vars()
649        try:
650            Scantable.set_dirframe(self, frame)
651        except RuntimeError, msg:
652            if rcParams['verbose']:
653                print msg
654            else:
655                raise
656        self._add_history("set_dirframe", varlist)
657
658    def get_unit(self):
659        """
660        Get the default unit set in this scantable
661        Returns:
662            A unit string
663        """
664        inf = self._getcoordinfo()
665        unit = inf[0]
666        if unit == '': unit = 'channel'
667        return unit
668
669    def get_abcissa(self, rowno=0):
670        """
671        Get the abcissa in the current coordinate setup for the currently
672        selected Beam/IF/Pol
673        Parameters:
674            rowno:    an optional row number in the scantable. Default is the
675                      first row, i.e. rowno=0
676        Returns:
677            The abcissa values and the format string (as a dictionary)
678        """
679        abc = self._getabcissa(rowno)
680        lbl = self._getabcissalabel(rowno)
681        return abc, lbl
682
683    def flag(self, mask=None):
684        """
685        Flag the selected data using an optional channel mask.
686        Parameters:
687            mask:   an optional channel mask, created with create_mask. Default
688                    (no mask) is all channels.
689        """
690        varlist = vars()
691        mask = mask or []
692        try:
693            self._flag(mask)
694        except RuntimeError, msg:
695            if rcParams['verbose']:
696                print msg
697                return
698            else: raise
699        self._add_history("flag", varlist)
700
701    @print_log_dec
702    def lag_flag(self, start, end, unit="MHz", insitu=None):
703        """
704        Flag the data in 'lag' space by providing a frequency to remove.
705        Flagged data in the scantable gets interpolated over the region.
706        No taper is applied.
707        Parameters:
708            start:    the start frequency (really a period within the
709                      bandwidth)  or period to remove
710            end:      the end frequency or period to remove
711            unit:     the frequency unit (default "MHz") or "" for
712                      explicit lag channels
713        Notes:
714            It is recommended to flag edges of the band or strong
715            signals beforehand.
716        """
717        if insitu is None: insitu = rcParams['insitu']
718        self._math._setinsitu(insitu)
719        varlist = vars()
720        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
721        if not (unit == "" or base.has_key(unit)):
722            raise ValueError("%s is not a valid unit." % unit)
723        try:
724            if unit == "":
725                s = scantable(self._math._lag_flag(self, start, end, "lags"))
726            else:
727                s = scantable(self._math._lag_flag(self, start*base[unit],
728                                                   end*base[unit], "frequency"))
729        except RuntimeError, msg:
730            if rcParams['verbose']:
731                print msg
732                return
733            else: raise
734        s._add_history("lag_flag", varlist)
735        if insitu:
736            self._assign(s)
737        else:
738            return s
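    # Illustrative sketch (assumes 'scan' is an existing scantable); the 5.5-6.5 MHz
    # window is a placeholder for the ripple period to remove:
    #   cleaned = scan.lag_flag(5.5, 6.5, unit="MHz", insitu=False)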
739
740    @print_log_dec
741    def create_mask(self, *args, **kwargs):
742        """
743        Compute and return a mask based on [min, max] windows.
744        The specified windows are to be INCLUDED, when the mask is
745        applied.
746        Parameters:
747            [min, max], [min2, max2], ...
748                Pairs of start/end points (inclusive) specifying the regions
749                to be masked
750            invert:     optional argument. If specified as True,
751                        return an inverted mask, i.e. the regions
752                        specified are EXCLUDED
753            row:        create the mask using the specified row for
754                        unit conversions, default is row=0.
755                        Only necessary if the frequency varies over rows.
756        Example:
757            scan.set_unit('channel')
758            a)
759            msk = scan.create_mask([400, 500], [800, 900])
760            # masks everything outside 400 and 500
761            # and 800 and 900 in the unit 'channel'
762
763            b)
764            msk = scan.create_mask([400, 500], [800, 900], invert=True)
765            # masks the regions between 400 and 500
766            # and 800 and 900 in the unit 'channel'
767            c)
768            # mask only channel 400
769            msk = scan.create_mask([400])
770        """
771        row = kwargs.get("row", 0)
772        data = self._getabcissa(row)
773        u = self._getcoordinfo()[0]
774        if rcParams['verbose']:
775            if u == "": u = "channel"
776            msg = "The current mask window unit is %s" % u
777            i = self._check_ifs()
778            if not i:
779                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
780            asaplog.push(msg)
781        n = self.nchan()
782        msk = _n_bools(n, False)
783        # test if args is a 'list' or a 'normal *args - UGLY!!!
784
785        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
786             and args or args[0]
787        for window in ws:
788            if len(window) == 1:
789                window = [window[0], window[0]]
790            if len(window) == 0 or  len(window) > 2:
791                raise ValueError("A window needs to be defined as [start(, end)]")
792            if window[0] > window[1]:
793                tmp = window[0]
794                window[0] = window[1]
795                window[1] = tmp
796            for i in range(n):
797                if data[i] >= window[0] and data[i] <= window[1]:
798                    msk[i] = True
799        if kwargs.has_key('invert'):
800            if kwargs.get('invert'):
801                msk = mask_not(msk)
802        return msk
803
804    def get_restfreqs(self):
805        """
806        Get the restfrequency(s) stored in this scantable.
807        The return value(s) are always of unit 'Hz'
808        Parameters:
809            none
810        Returns:
811            a list of doubles
812        """
813        return list(self._getrestfreqs())
814
815
816    def set_restfreqs(self, freqs=None, unit='Hz'):
817        """
818        Set or replace the rest frequencies of the selected data.
819        If the 'freqs' argument holds a scalar,
820        that rest frequency will be applied to all the selected
821        data. If the 'freqs' argument holds
822        a vector, it MUST be of equal or smaller length than
823        the number of IFs (and the available rest frequencies will be
824        replaced by this vector). In this case, *all* data have
825        the rest frequency set per IF according
826        to the corresponding value you give in the 'freqs' vector.
827        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
828        IF 1 gets restfreq 2e9.
829        You can also specify the frequencies via a linecatalog.
830
831        Parameters:
832            freqs:   list of rest frequency values or string identifiers
833            unit:    unit for rest frequency (default 'Hz')
834
835        Example:
836            # set the given restfrequency for the whole table
837            scan.set_restfreqs(freqs=1.4e9)
838            # If the number of IFs in the data is >= 2, IF0 gets the first
839            # value IF1 the second...
840            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
841            #set the given restfrequency for the whole table (by name)
842            scan.set_restfreqs(freqs="OH1667")
843
844        Note:
845            To do more sophisticated rest frequency setting, e.g. on a
846            source and IF basis, use scantable.set_selection() before using
847            this function.
848            # provided your scantable is called scan
849            selection = selector()
850            selection.set_name("ORION*")
851            selection.set_ifs([1])
852            scan.set_selection(selection)
853            scan.set_restfreqs(freqs=86.6e9)
854
855        """
856        varlist = vars()
857        from asap import linecatalog
858        # simple  value
859        if isinstance(freqs, int) or isinstance(freqs, float):
860            self._setrestfreqs(freqs, "",unit)
861        # list of values
862        elif isinstance(freqs, list) or isinstance(freqs, tuple):
863            # list values are scalars
864            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
865                sel = selector()
866                savesel = self._getselection()
867                iflist = self.getifnos()
868                for i in xrange(len(freqs)):
869                    sel.set_ifs(iflist[i])
870                    self._setselection(sel)
871                    self._setrestfreqs(freqs[i], "",unit)
872                self._setselection(savesel)
873            # list values are dicts with 'value' and 'name' keys
874            elif isinstance(freqs[-1], dict):
875                sel = selector()
876                savesel = self._getselection()
877                iflist = self.getifnos()
878                for i in xrange(len(freqs)):
879                    sel.set_ifs(iflist[i])
880                    self._setselection(sel)
881                    self._setrestfreqs(freqs[i]["value"],
882                                       freqs[i]["name"], "MHz")
883                self._setselection(savesel)
884        # freqs are to be taken from a linecatalog
885        elif isinstance(freqs, linecatalog):
886            sel = selector()
887            savesel = self._getselection()
888            iflist = self.getifnos()
889            for i in xrange(freqs.nrow()):
890                sel.set_ifs(iflist[i])
891                self._setselection(sel)
892                self._setrestfreqs(freqs.get_frequency(i),
893                                   freqs.get_name(i), "MHz")
894                # ensure that we are not iterating past nIF
895                if i == self.nif()-1: break
896            self._setselection(savesel)
897        else:
898            return
899        self._add_history("set_restfreqs", varlist)
900
901    def shift_refpix(self, delta):
902        """
903        Shift the reference pixel of the Spectral Coordinate by an
904        integer amount.
905        Parameters:
906            delta:   the amount to shift by
907        Note:
908            Be careful using this with broadband data.
909        """
910        Scantable.shift(self, delta)
911
912    def history(self, filename=None):
913        """
914        Print the history. Optionally to a file.
915        Parameters:
916            filename:    The name  of the file to save the history to.
917        """
918        hist = list(self._gethistory())
919        out = "-"*80
920        for h in hist:
921            if h.startswith("---"):
922                out += "\n"+h
923            else:
924                items = h.split("##")
925                date = items[0]
926                func = items[1]
927                items = items[2:]
928                out += "\n"+date+"\n"
929                out += "Function: %s\n  Parameters:" % (func)
930                for i in items:
931                    s = i.split("=")
932                    out += "\n   %s = %s" % (s[0], s[1])
933                out += "\n"+"-"*80
934        if filename is not None:
935            if filename == "":
936                filename = 'scantable_history.txt'
937            import os
938            filename = os.path.expandvars(os.path.expanduser(filename))
939            if not os.path.isdir(filename):
940                data = open(filename, 'w')
941                data.write(out)
942                data.close()
943            else:
944                msg = "Illegal file name '%s'." % (filename)
945                if rcParams['verbose']:
946                    print msg
947                else:
948                    raise IOError(msg)
949        if rcParams['verbose']:
950            try:
951                from IPython.genutils import page as pager
952            except ImportError:
953                from pydoc import pager
954            pager(out)
955        else:
956            return out
957        return
958    #
959    # Maths business
960    #
961    @print_log_dec
962    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
963        """
964        Return the (time) weighted average of a scan.
965        Note:
966            in channels only - align if necessary
967        Parameters:
968            mask:     an optional mask (only used for 'var' and 'tsys'
969                      weighting)
970            scanav:   True averages each scan separately
971                      False (default) averages all scans together,
972            weight:   Weighting scheme.
973                      'none'     (mean, no weight)
974                      'var'      (1/var(spec) weighted)
975                      'tsys'     (1/Tsys**2 weighted)
976                      'tint'     (integration time weighted)
977                      'tintsys'  (Tint/Tsys**2)
978                      'median'   (median averaging)
979                      The default is 'tint'
980            align:    align the spectra in velocity before averaging. It takes
981                      the time of the first spectrum as reference time.
982        Example:
983            # time average the scantable without using a mask
984            newscan = scan.average_time()
985        """
986        varlist = vars()
987        weight = weight or 'TINT'
988        mask = mask or ()
989        scanav = (scanav and 'SCAN') or 'NONE'
990        scan = (self, )
991        try:
992            if align:
993                scan = (self.freq_align(insitu=False), )
994            s = None
995            if weight.upper() == 'MEDIAN':
996                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
997                                                         scanav))
998            else:
999                s = scantable(self._math._average(scan, mask, weight.upper(),
1000                              scanav))
1001        except RuntimeError, msg:
1002            if rcParams['verbose']:
1003                print msg
1004                return
1005            else: raise
1006        s._add_history("average_time", varlist)
1007        return s
1008
1009    @print_log_dec
1010    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1011        """
1012        Return a scan where all spectra are converted to either
1013        Jansky or Kelvin depending upon the flux units of the scan table.
1014        By default the function tries to look the values up internally.
1015        If it can't find them (or if you want to over-ride), you must
1016        specify EITHER jyperk OR eta (and D which it will try to look up
1017        also if you don't set it). jyperk takes precedence if you set both.
1018        Parameters:
1019            jyperk:      the Jy / K conversion factor
1020            eta:         the aperture efficiency
1021            d:           the geometric diameter (metres)
1022            insitu:      if False a new scantable is returned.
1023                         Otherwise, the scaling is done in-situ
1024                         The default is taken from .asaprc (False)
1025        """
1026        if insitu is None: insitu = rcParams['insitu']
1027        self._math._setinsitu(insitu)
1028        varlist = vars()
1029        jyperk = jyperk or -1.0
1030        d = d or -1.0
1031        eta = eta or -1.0
1032        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1033        s._add_history("convert_flux", varlist)
1034        if insitu: self._assign(s)
1035        else: return s
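    # Illustrative sketch (assumes 'scan' is an existing scantable); eta and d are
    # placeholder values, not recommended telescope parameters:
    #   converted = scan.convert_flux(eta=0.58, d=22.0, insitu=False)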
1036
1037    @print_log_dec
1038    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1039        """
1040        Return a scan after applying a gain-elevation correction.
1041        The correction can be made via either a polynomial or a
1042        table-based interpolation (and extrapolation if necessary).
1043        You specify polynomial coefficients, an ascii table or neither.
1044        If you specify neither, then a polynomial correction will be made
1045        with built in coefficients known for certain telescopes (an error
1046        will occur if the instrument is not known).
1047        The data and Tsys are *divided* by the scaling factors.
1048        Parameters:
1049            poly:        Polynomial coefficients (default None) to compute a
1050                         gain-elevation correction as a function of
1051                         elevation (in degrees).
1052            filename:    The name of an ascii file holding correction factors.
1053                         The first row of the ascii file must give the column
1054                         names and these MUST include columns
1055                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1056                         by this) somewhere.
1057                         The second row must give the data type of the
1058                         column. Use 'R' for Real and 'I' for Integer.
1059                         An example file would be
1060                         (actual factors are arbitrary) :
1061
1062                         TIME ELEVATION FACTOR
1063                         R R R
1064                         0.1 0 0.8
1065                         0.2 20 0.85
1066                         0.3 40 0.9
1067                         0.4 60 0.85
1068                         0.5 80 0.8
1069                         0.6 90 0.75
1070            method:      Interpolation method when correcting from a table.
1071                         Values are  "nearest", "linear" (default), "cubic"
1072                         and "spline"
1073            insitu:      if False a new scantable is returned.
1074                         Otherwise, the scaling is done in-situ
1075                         The default is taken from .asaprc (False)
1076        """
1077
1078        if insitu is None: insitu = rcParams['insitu']
1079        self._math._setinsitu(insitu)
1080        varlist = vars()
1081        poly = poly or ()
1082        from os.path import expandvars
1083        filename = expandvars(filename)
1084        s = scantable(self._math._gainel(self, poly, filename, method))
1085        s._add_history("gain_el", varlist)
1086        if insitu:
1087            self._assign(s)
1088        else:
1089            return s
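    # Illustrative sketch (assumes 'scan' is an existing scantable); 'gainel.txt'
    # is a hypothetical ascii table in the format described above:
    #   corrected = scan.gain_el(filename='gainel.txt', method='linear',
    #                            insitu=False)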
1090
1091    @print_log_dec
1092    def freq_align(self, reftime=None, method='cubic', insitu=None):
1093        """
1094        Return a scan where all rows have been aligned in frequency/velocity.
1095        The alignment frequency frame (e.g. LSRK) is that set by function
1096        set_freqframe.
1097        Parameters:
1098            reftime:     reference time to align at. By default, the time of
1099                         the first row of data is used.
1100            method:      Interpolation method for regridding the spectra.
1101                         Choose from "nearest", "linear", "cubic" (default)
1102                         and "spline"
1103            insitu:      if False a new scantable is returned.
1104                         Otherwise, the scaling is done in-situ
1105                         The default is taken from .asaprc (False)
1106        """
1107        if insitu is None: insitu = rcParams["insitu"]
1108        self._math._setinsitu(insitu)
1109        varlist = vars()
1110        reftime = reftime or ""
1111        s = scantable(self._math._freq_align(self, reftime, method))
1112        s._add_history("freq_align", varlist)
1113        if insitu: self._assign(s)
1114        else: return s
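    # Illustrative sketch (assumes 'scan' is an existing scantable): align all rows
    # to the time of the first integration before time averaging.
    #   aligned = scan.freq_align(insitu=False)
    #   averaged = aligned.average_time()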
1115
1116    @print_log_dec
1117    def opacity(self, tau=None, insitu=None):
1118        """
1119        Apply an opacity correction. The data
1120        and Tsys are multiplied by the correction factor.
1121        Parameters:
1122            tau:         (list of) opacity from which the correction factor is
1123                         exp(tau*ZD)
1124                         where ZD is the zenith-distance.
1125                         If a list is provided, it has to be of length nIF,
1126                         nIF*nPol or 1 and in order of IF/POL, e.g.
1127                         [opif0pol0, opif0pol1, opif1pol0 ...]
1128                         if tau is `None` the opacities are determined from a
1129                         model.
1130            insitu:      if False a new scantable is returned.
1131                         Otherwise, the scaling is done in-situ
1132                         The default is taken from .asaprc (False)
1133        """
1134        if insitu is None: insitu = rcParams['insitu']
1135        self._math._setinsitu(insitu)
1136        varlist = vars()
1137        if not hasattr(tau, "__len__"):
1138            tau = [tau]
1139        s = scantable(self._math._opacity(self, tau))
1140        s._add_history("opacity", varlist)
1141        if insitu: self._assign(s)
1142        else: return s
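    # Illustrative sketch (assumes 'scan' is an existing scantable); tau=0.083 is a
    # placeholder zenith opacity applied to all IFs/polarisations:
    #   corrected = scan.opacity(tau=0.083, insitu=False)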
1143
1144    @print_log_dec
1145    def bin(self, width=5, insitu=None):
1146        """
1147        Return a scan where all spectra have been binned up.
1148        Parameters:
1149            width:       The bin width (default=5) in pixels
1150            insitu:      if False a new scantable is returned.
1151                         Otherwise, the scaling is done in-situ
1152                         The default is taken from .asaprc (False)
1153        """
1154        if insitu is None: insitu = rcParams['insitu']
1155        self._math._setinsitu(insitu)
1156        varlist = vars()
1157        s = scantable(self._math._bin(self, width))
1158        s._add_history("bin", varlist)
1159        if insitu:
1160            self._assign(s)
1161        else:
1162            return s
1163
1164    @print_log_dec
1165    def resample(self, width=5, method='cubic', insitu=None):
1166        """
1167        Return a scan where all spectra have been resampled (binned up by interpolation).
1168
1169        Parameters:
1170            width:       The bin width (default=5) in pixels
1171            method:      Interpolation method used for the resampling.
1172                         Values are  "nearest", "linear", "cubic" (default)
1173                         and "spline"
1174            insitu:      if False a new scantable is returned.
1175                         Otherwise, the scaling is done in-situ
1176                         The default is taken from .asaprc (False)
1177        """
1178        if insitu is None: insitu = rcParams['insitu']
1179        self._math._setinsitu(insitu)
1180        varlist = vars()
1181        s = scantable(self._math._resample(self, method, width))
1182        s._add_history("resample", varlist)
1183        if insitu: self._assign(s)
1184        else: return s
1185
1186    @print_log_dec
1187    def average_pol(self, mask=None, weight='none'):
1188        """
1189        Average the Polarisations together.
1190        Parameters:
1191            mask:        An optional mask defining the region, where the
1192                         averaging will be applied. The output will have all
1193                         specified points masked.
1194            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1195                         weighted), or 'tsys' (1/Tsys**2 weighted)
1196        """
1197        varlist = vars()
1198        mask = mask or ()
1199        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1200        s._add_history("average_pol", varlist)
1201        return s
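    # Illustrative sketch (assumes 'scan' is an existing scantable): average the
    # polarisations with 1/Tsys**2 weighting.
    #   polav = scan.average_pol(weight='tsys')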
1202
1203    @print_log_dec
1204    def average_beam(self, mask=None, weight='none'):
1205        """
1206        Average the Beams together.
1207        Parameters:
1208            mask:        An optional mask defining the region, where the
1209                         averaging will be applied. The output will have all
1210                         specified points masked.
1211            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1212                         weighted), or 'tsys' (1/Tsys**2 weighted)
1213        """
1214        varlist = vars()
1215        mask = mask or ()
1216        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1217        s._add_history("average_beam", varlist)
1218        return s
1219
1220    def parallactify(self, pflag):
1221        """
1222        Set a flag to indicate whether this data should be treated as having
1223        been 'parallactified' (total phase == 0.0)
1224        Parameters:
1225            pflag:  Bool indicating whether to turn this on (True) or
1226                    off (False)
1227        """
1228        varlist = vars()
1229        self._parallactify(pflag)
1230        self._add_history("parallactify", varlist)
1231
1232    @print_log_dec
1233    def convert_pol(self, poltype=None):
1234        """
1235        Convert the data to a different polarisation type.
1236        Note that you will need cross-polarisation terms for most conversions.
1237        Parameters:
1238            poltype:    The new polarisation type. Valid types are:
1239                        "linear", "circular", "stokes" and "linpol"
1240        """
1241        varlist = vars()
1242        try:
1243            s = scantable(self._math._convertpol(self, poltype))
1244        except RuntimeError, msg:
1245            if rcParams['verbose']:
1246                print msg
1247                return
1248            else:
1249                raise
1250        s._add_history("convert_pol", varlist)
1251        return s
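    # Illustrative sketch (assumes 'scan' holds linear polarisations including the
    # cross terms): convert to Stokes parameters.
    #   stokes = scan.convert_pol('stokes')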
1252
1253    @print_log_dec
1254    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1255        """
1256        Smooth the spectrum by the specified kernel (conserving flux).
1257        Parameters:
1258            kernel:     The type of smoothing kernel. Select from
1259                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1260                        or 'poly'
1261            width:      The width of the kernel in pixels. For hanning this is
1262                        ignored, otherwise it defaults to 5 pixels.
1263                        For 'gaussian' it is the Full Width Half
1264                        Maximum. For 'boxcar' it is the full width.
1265                        For 'rmedian' and 'poly' it is the half width.
1266            order:      Optional parameter for 'poly' kernel (default is 2), to
1267                        specify the order of the polynomial. Ignored by all other
1268                        kernels.
1269            insitu:     if False a new scantable is returned.
1270                        Otherwise, the scaling is done in-situ
1271                        The default is taken from .asaprc (False)
1272        Example:
1273             none
1274        """
1275        if insitu is None: insitu = rcParams['insitu']
1276        self._math._setinsitu(insitu)
1277        varlist = vars()
1278        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1279        s._add_history("smooth", varlist)
1280        if insitu: self._assign(s)
1281        else: return s
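    # Illustrative sketch (assumes 'scan' is an existing scantable): boxcar smooth
    # with a 5-channel-wide kernel, returning a new scantable.
    #   smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)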
1282
1283    @print_log_dec
1284    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False,
1285                      insitu=None):
1286        """
1287        Return a scan which has been baselined (all rows) by a polynomial.
1288        Parameters:
1289            mask:       an optional mask
1290            order:      the order of the polynomial (default is 0)
1291            plot:       plot the fit and the residual. In this mode each
1292                        individual fit has to be approved by typing 'y'
1293                        or 'n'
1294            uselin:     use linear polynomial fit
1295            insitu:     if False a new scantable is returned.
1296                        Otherwise, the scaling is done in-situ
1297                        The default is taken from .asaprc (False)
1298        Example:
1299            # return a scan baselined by a third order polynomial,
1300            # not using a mask
1301            bscan = scan.poly_baseline(order=3)
1302        """
1303        if insitu is None: insitu = rcParams['insitu']
1304        varlist = vars()
1305        if mask is None:
1306            mask = [True for i in xrange(self.nchan(-1))]
1307        from asap.asapfitter import fitter
1308        try:
1309            f = fitter()
1310            f.set_scan(self, mask)
1311            #f.set_function(poly=order)
1312            if uselin:
1313                f.set_function(lpoly=order)
1314            else:
1315                f.set_function(poly=order)
1316            s = f.auto_fit(insitu, plot=plot)
1317            s._add_history("poly_baseline", varlist)
1318            if insitu: self._assign(s)
1319            else: return s
1320        except RuntimeError:
1321            msg = "The fit failed, possibly because it didn't converge."
1322            if rcParams['verbose']:
1323                print msg
1324                return
1325            else:
1326                raise RuntimeError(msg)
1327
1328    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1329                           threshold=3, chan_avg_limit=1, plot=False,
1330                           insitu=None):
1331        """
1332        Return a scan which has been baselined (all rows) by a polynomial.
1333        Spectral lines are detected first using linefinder and masked out
1334        to avoid them affecting the baseline solution.
1335
1336        Parameters:
1337            mask:       an optional mask retrieved from scantable
1338            edge:       an optional number of channels to drop at
1339                        the edge of the spectrum. If only one value is
1340                        specified, the same number will be dropped from
1341                        both sides of the spectrum. Default is to keep
1342                        all channels. Nested tuples represent individual
1343                        edge selection for different IFs (the number of spectral
1344                        channels can differ)
1345            order:      the order of the polynomial (default is 0)
1346            threshold:  the threshold used by line finder. It is better to
1347                        keep it large as only strong lines affect the
1348                        baseline solution.
1349            chan_avg_limit:
1350                        a maximum number of consecutive spectral channels to
1351                        average during the search of weak and broad lines.
1352                        The default is no averaging (and no search for weak
1353                        lines). If such lines can affect the fitted baseline
1354                        (e.g. a high order polynomial is fitted), increase this
1355                        parameter (usually values up to 8 are reasonable). Most
1356                        users of this method should find the default value
1357                        sufficient.
1358            plot:       plot the fit and the residual. In this mode each
1359                        individual fit has to be approved by typing 'y'
1360                        or 'n'
1361            insitu:     if False a new scantable is returned.
1362                        Otherwise, the baseline subtraction is done in-situ.
1363                        The default is taken from .asaprc (False)
1364
1365        Example:
1366            scan2=scan.auto_poly_baseline(order=7)
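            # a sketch for a hypothetical two-IF dataset, dropping a different
            # number of edge channels per IF (all values are illustrative)
            scan2 = scan.auto_poly_baseline(order=2, edge=((10, 10), (20, 40)))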
1367        """
1368        if insitu is None: insitu = rcParams['insitu']
1369        varlist = vars()
1370        from asap.asapfitter import fitter
1371        from asap.asaplinefind import linefinder
1372        from asap import _is_sequence_or_number as _is_valid
1373
1374        # check whether edge is set up for each IF individually
1375        individualedge = False
1376        if len(edge) > 1:
1377            if isinstance(edge[0], (list, tuple)):
1378                individualedge = True
1379
1380        if not _is_valid(edge, int) and not individualedge:
1381            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1382                "pair of integers specified as a tuple. Nested tuples are " \
1383                "allowed to make individual selections for different IFs."
1384
1385        curedge = (0, 0)
1386        if individualedge:
1387            for edgepar in edge:
1388                if not _is_valid(edgepar, int):
1389                    raise ValueError, "Each element of the 'edge' tuple has " \
1390                        "to be a pair of integers or an integer."
1391        else:
1392            curedge = edge
1393
1394        # setup fitter
1395        f = fitter()
1396        f.set_function(poly=order)
1397
1398        # setup line finder
1399        fl = linefinder()
1400        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1401
1402        if not insitu:
1403            workscan = self.copy()
1404        else:
1405            workscan = self
1406
1407        fl.set_scan(workscan)
1408
1409        rows = range(workscan.nrow())
1410        asaplog.push("Processing:")
1411        for r in rows:
1412            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1413                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1414                 workscan.getpol(r), workscan.getcycle(r))
1415            asaplog.push(msg, False)
1416
1417            # figure out edge parameter
1418            if individualedge:
1419                if len(edge) <= workscan.getif(r):
1420                    raise RuntimeError, "Number of edge elements appears to " \
1421                                        "be less than the number of IFs"
1422                curedge = edge[workscan.getif(r)]
1423
1424            # run the line finder on this row
1425            fl.find_lines(r, mask, curedge)
1426            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1427                        mask_and(workscan._getmask(r), fl.get_mask()))
1428            f.fit()
1429            x = f.get_parameters()
1430            if plot:
1431                f.plot(residual=True)
1432                x = raw_input("Accept fit ( [y]/n ): ")
1433                if x.upper() == 'N':
1434                    continue
1435            workscan._setspectrum(f.fitter.getresidual(), r)
1436        if plot:
1437            f._p.unmap()
1438            f._p = None
1439        workscan._add_history("auto_poly_baseline", varlist)
1440        if insitu:
1441            self._assign(workscan)
1442        else:
1443            return workscan
1444
1445    @print_log_dec
1446    def rotate_linpolphase(self, angle):
1447        """
1448        Rotate the phase of the complex polarization O=Q+iU correlation.
1449        This is always done in situ in the raw data.  So if you call this
1450        function more than once then each call rotates the phase further.
1451        Parameters:
1452            angle:   The angle (degrees) to rotate (add) by.
1453        Examples:
1454            scan.rotate_linpolphase(2.3)
1455        """
1456        varlist = vars()
1457        self._math._rotate_linpolphase(self, angle)
1458        self._add_history("rotate_linpolphase", varlist)
1459        return
1460
1461    @print_log_dec
1462    def rotate_xyphase(self, angle):
1463        """
1464        Rotate the phase of the XY correlation.  This is always done in situ
1465        in the data.  So if you call this function more than once
1466        then each call rotates the phase further.
1467        Parameters:
1468            angle:   The angle (degrees) to rotate (add) by.
1469        Examples:
1470            scan.rotate_xyphase(2.3)
1471        """
1472        varlist = vars()
1473        self._math._rotate_xyphase(self, angle)
1474        self._add_history("rotate_xyphase", varlist)
1475        return
1476
1477    @print_log_dec
1478    def swap_linears(self):
1479        """
1480        Swap the linear polarisations XX and YY, or more generally the first
1481        two polarisations, as this also works for circulars.
1482        """
1483        varlist = vars()
1484        self._math._swap_linears(self)
1485        self._add_history("swap_linears", varlist)
1486        return
1487
1488    @print_log_dec
1489    def invert_phase(self):
1490        """
1491        Invert the phase of the complex polarisation
1492        """
1493        varlist = vars()
1494        self._math._invert_phase(self)
1495        self._add_history("invert_phase", varlist)
1496        return
1497
1498    @print_log_dec
1499    def add(self, offset, insitu=None):
1500        """
1501        Return a scan where all spectra have the offset added
1502        Parameters:
1503            offset:      the offset
1504            insitu:      if False a new scantable is returned.
1505                         Otherwise, the addition is done in-situ.
1506                         The default is taken from .asaprc (False)
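        Example:
            # add a constant offset of 0.05 (in the current flux unit) to all
            # spectra; the value is illustrative only
            sscan = scan.add(0.05, insitu=False)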
1507        """
1508        if insitu is None: insitu = rcParams['insitu']
1509        self._math._setinsitu(insitu)
1510        varlist = vars()
1511        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1512        s._add_history("add", varlist)
1513        if insitu:
1514            self._assign(s)
1515        else:
1516            return s
1517
1518    @print_log_dec
1519    def scale(self, factor, tsys=True, insitu=None):
1520        """
1521        Return a scan where all spectra are scaled by the given 'factor'
1522        Parameters:
1523            factor:      the scaling factor
1524            insitu:      if False a new scantable is returned.
1525                         Otherwise, the scaling is done in-situ.
1526                         The default is taken from .asaprc (False)
1527            tsys:        if True (default) then apply the operation to Tsys
1528                         as well as the data
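        Example:
            # scale all spectra (and Tsys) by an illustrative gain factor
            sscan = scan.scale(1.25, tsys=True, insitu=False)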
1529        """
1530        if insitu is None: insitu = rcParams['insitu']
1531        self._math._setinsitu(insitu)
1532        varlist = vars()
1533        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1534        s._add_history("scale", varlist)
1535        if insitu:
1536            self._assign(s)
1537        else:
1538            return s
1539
1540    def set_sourcetype(self, match, matchtype="pattern",
1541                       sourcetype="reference"):
1542        """
1543        Set the type of the source to be a source or reference scan
1544        using the provided pattern.
1545        Parameters:
1546            match:          a Unix style pattern, regular expression or selector
1547            matchtype:      'pattern' (default) UNIX style pattern or
1548                            'regex' regular expression
1549            sourcetype:     the type of the source to use (source/reference)
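        Example:
            # a sketch marking scans whose names end in '_R' as references;
            # the '*_R' pattern is only an illustration
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")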
1550        """
1551        varlist = vars()
1552        basesel = self.get_selection()
1553        stype = -1
1554        if sourcetype.lower().startswith("r"):
1555            stype = 1
1556        elif sourcetype.lower().startswith("s"):
1557            stype = 0
1558        else:
1559            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1560        if matchtype.lower().startswith("p"):
1561            matchtype = "pattern"
1562        elif matchtype.lower().startswith("r"):
1563            matchtype = "regex"
1564        else:
1565            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1566        sel = selector()
1567        if isinstance(match, selector):
1568            sel = match
1569        else:
1570            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1571        self.set_selection(basesel+sel)
1572        self._setsourcetype(stype)
1573        self.set_selection(basesel)
1574        self._add_history("set_sourcetype", varlist)
1575
1576    @print_log_dec
1577    def auto_quotient(self, preserve=True, mode='paired'):
1578        """
1579        This function allows you to build quotients automatically.
1580        It assumes the observation has the same number of
1581        "ons" and "offs".
1582        Parameters:
1583            preserve:       you can preserve (default) the continuum or
1584                            remove it.  The equations used are
1585                            preserve: Output = Toff * (on/off) - Toff
1586                            remove:   Output = Toff * (on/off) - Ton
1587            mode:           the on/off detection mode
1588                            'paired' (default)
1589                            identifies 'off' scans by the
1590                            trailing '_R' (Mopra/Parkes) or
1591                            '_e'/'_w' (Tid) and matches
1592                            on/off pairs from the observing pattern
1593                            'time'
1594                            finds the closest off in time
1595
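        Example:
            # form quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')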
1596        """
1597        modes = ["time", "paired"]
1598        if mode not in modes:
1599            msg = "please provide a valid mode. Valid modes are %s" % (modes)
1600            raise ValueError(msg)
1601        varlist = vars()
1602        s = None
1603        if mode.lower() == "paired":
1604            basesel = self.get_selection()
1605            sel = selector()+basesel
1606            sel.set_query("SRCTYPE==1")
1607            self.set_selection(sel)
1608            offs = self.copy()
1609            sel.set_query("SRCTYPE==0")
1610            self.set_selection(sel)
1611            ons = self.copy()
1612            s = scantable(self._math._quotient(ons, offs, preserve))
1613            self.set_selection(basesel)
1614        elif mode.lower() == "time":
1615            s = scantable(self._math._auto_quotient(self, mode, preserve))
1616        s._add_history("auto_quotient", varlist)
1617        return s
1618
1619    @print_log_dec
1620    def mx_quotient(self, mask = None, weight='median', preserve=True):
1621        """
1622        Form a quotient using "off" beams when observing in "MX" mode.
1623        Parameters:
1624            mask:           an optional mask to be used when weight == 'stddev'
1625            weight:         How to average the off beams.  Default is 'median'.
1626            preserve:       you can preserve (default) the continuum or
1627                            remove it.  The equations used are
1628                            preserve: Output = Toff * (on/off) - Toff
1629                            remove:   Output = Toff * (on/off) - Ton
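        Example:
            # an MX-mode quotient using the median of the off beams
            q = scan.mx_quotient(weight='median', preserve=True)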
1630        """
1631        mask = mask or ()
1632        varlist = vars()
1633        on = scantable(self._math._mx_extract(self, 'on'))
1634        preoff = scantable(self._math._mx_extract(self, 'off'))
1635        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1636        from asapmath  import quotient
1637        q = quotient(on, off, preserve)
1638        q._add_history("mx_quotient", varlist)
1639        return q
1640
1641    @print_log_dec
1642    def freq_switch(self, insitu=None):
1643        """
1644        Apply frequency switching to the data.
1645        Parameters:
1646            insitu:      if False a new scantable is returned.
1647                         Otherwise, the switching is done in-situ.
1648                         The default is taken from .asaprc (False)
1649        Example:
1650            none
1651        """
1652        if insitu is None: insitu = rcParams['insitu']
1653        self._math._setinsitu(insitu)
1654        varlist = vars()
1655        s = scantable(self._math._freqswitch(self))
1656        s._add_history("freq_switch", varlist)
1657        if insitu: self._assign(s)
1658        else: return s
1659
1660    @print_log_dec
1661    def recalc_azel(self):
1662        """
1663        Recalculate the azimuth and elevation for each position.
1664        Parameters:
1665            none
1666        Example:
1667        """
1668        varlist = vars()
1669        self._recalcazel()
1670        self._add_history("recalc_azel", varlist)
1671        return
1672
1673    @print_log_dec
1674    def __add__(self, other):
1675        varlist = vars()
1676        s = None
1677        if isinstance(other, scantable):
1678            s = scantable(self._math._binaryop(self, other, "ADD"))
1679        elif isinstance(other, float):
1680            s = scantable(self._math._unaryop(self, other, "ADD", False))
1681        else:
1682            raise TypeError("Other input is not a scantable or float value")
1683        s._add_history("operator +", varlist)
1684        return s
1685
1686    @print_log_dec
1687    def __sub__(self, other):
1688        """
1689        implicit on all axes and on Tsys
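        Example:
            # an illustrative use: subtract a constant 1.5 from all spectra
            diff = scan - 1.5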
1690        """
1691        varlist = vars()
1692        s = None
1693        if isinstance(other, scantable):
1694            s = scantable(self._math._binaryop(self, other, "SUB"))
1695        elif isinstance(other, float):
1696            s = scantable(self._math._unaryop(self, other, "SUB", False))
1697        else:
1698            raise TypeError("Other input is not a scantable or float value")
1699        s._add_history("operator -", varlist)
1700        return s
1701
1702    @print_log_dec
1703    def __mul__(self, other):
1704        """
1705        implicit on all axes and on Tsys
1706        """
1707        varlist = vars()
1708        s = None
1709        if isinstance(other, scantable):
1710            s = scantable(self._math._binaryop(self, other, "MUL"))
1711        elif isinstance(other, float):
1712            s = scantable(self._math._unaryop(self, other, "MUL", False))
1713        else:
1714            raise TypeError("Other input is not a scantable or float value")
1715        s._add_history("operator *", varlist)
1716        return s
1717
1718
1719    @print_log_dec
1720    def __div__(self, other):
1721        """
1722        implicit on all axes and on Tsys
1723        """
1724        varlist = vars()
1725        s = None
1726        if isinstance(other, scantable):
1727            s = scantable(self._math._binaryop(self, other, "DIV"))
1728        elif isinstance(other, float):
1729            if other == 0.0:
1730                raise ZeroDivisionError("Dividing by zero is not recommended")
1731            s = scantable(self._math._unaryop(self, other, "DIV", False))
1732        else:
1733            raise TypeError("Other input is not a scantable or float value")
1734        s._add_history("operator /", varlist)
1735        return s
1736
1737    def get_fit(self, row=0):
1738        """
1739        Print or return the stored fits for a row in the scantable
1740        Parameters:
1741            row:    the row which the fit has been applied to.
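        Example:
            # print (or, with rc 'verbose' off, return) the fit stored for
            # the first row; assumes a fit has been stored previously
            scan.get_fit(0)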
1742        """
1743        if row >= self.nrow():
1744            return
1745        from asap.asapfit import asapfit
1746        fit = asapfit(self._getfit(row))
1747        if rcParams['verbose']:
1748            print fit
1749            return
1750        else:
1751            return fit.as_dict()
1752
1753    def flag_nans(self):
1754        """
1755        Utility function to flag NaN values in the scantable.
1756        """
1757        import numpy
1758        basesel = self.get_selection()
1759        for i in range(self.nrow()):
1760            sel = self.get_row_selector(i)
1761            self.set_selection(basesel+sel)
1762            nans = numpy.isnan(self._getspectrum(0))
1763            if numpy.any(nans):
1764                bnans = [ bool(v) for v in nans]
1765                self.flag(bnans)
1766        self.set_selection(basesel)
1767
1768    def get_row_selector(self, rowno):
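        # Build a selector which matches only the beam/IF/polarisation/scan/
        # cycle combination of the given row number.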
1769        return selector(beams=self.getbeam(rowno),
1770                        ifs=self.getif(rowno),
1771                        pols=self.getpol(rowno),
1772                        scans=self.getscan(rowno),
1773                        cycles=self.getcycle(rowno))
1774
1775    def _add_history(self, funcname, parameters):
1776        if not rcParams['scantable.history']:
1777            return
1778        # create date
1779        sep = "##"
1780        from datetime import datetime
1781        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1782        hist = dstr+sep
1783        hist += funcname+sep
1784        if parameters.has_key('self'): del parameters['self']
1785        for k, v in parameters.iteritems():
1786            if type(v) is dict:
1787                for k2, v2 in v.iteritems():
1788                    hist += k2
1789                    hist += "="
1790                    if isinstance(v2, scantable):
1791                        hist += 'scantable'
1792                    elif k2 == 'mask':
1793                        if isinstance(v2, list) or isinstance(v2, tuple):
1794                            hist += str(self._zip_mask(v2))
1795                        else:
1796                            hist += str(v2)
1797                    else:
1798                        hist += str(v2)
1799            else:
1800                hist += k
1801                hist += "="
1802                if isinstance(v, scantable):
1803                    hist += 'scantable'
1804                elif k == 'mask':
1805                    if isinstance(v, list) or isinstance(v, tuple):
1806                        hist += str(self._zip_mask(v))
1807                    else:
1808                        hist += str(v)
1809                else:
1810                    hist += str(v)
1811            hist += sep
1812        hist = hist[:-2] # remove trailing '##'
1813        self._addhistory(hist)
1814
1815
1816    def _zip_mask(self, mask):
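        # Compress a boolean channel mask into a list of [start, stop)
        # segments, e.g. [0, 0, 1, 1, 1, 0, 1] -> [[2, 5], [6, 7]].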
1817        mask = list(mask)
1818        i = 0
1819        segments = []
1820        while mask[i:].count(1):
1821            i += mask[i:].index(1)
1822            if mask[i:].count(0):
1823                j = i + mask[i:].index(0)
1824            else:
1825                j = len(mask)
1826            segments.append([i, j])
1827            i = j
1828        return segments
1829
1830    def _get_ordinate_label(self):
1831        fu = "("+self.get_fluxunit()+")"
1832        import re
1833        lbl = "Intensity"
1834        if re.match(".K.", fu):
1835            lbl = "Brightness Temperature "+ fu
1836        elif re.match(".Jy.", fu):
1837            lbl = "Flux density "+ fu
1838        return lbl
1839
1840    def _check_ifs(self):
1841        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1842        nchans = filter(lambda t: t > 0, nchans)
1843        return (len(nchans) > 0 and min(nchans) == max(nchans))
1844
1845    def _fill(self, names, unit, average):
1846        import os
1847        from asap._asap import stfiller
1848        first = True
1849        fullnames = []
1850        for name in names:
1851            name = os.path.expandvars(name)
1852            name = os.path.expanduser(name)
1853            if not os.path.exists(name):
1854                msg = "File '%s' does not exist" % (name)
1855                if rcParams['verbose']:
1856                    asaplog.push(msg)
1857                    print asaplog.pop().strip()
1858                    return
1859                raise IOError(msg)
1860            fullnames.append(name)
1861        if average:
1862            asaplog.push('Auto averaging integrations')
1863        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1864        for name in fullnames:
1865            tbl = Scantable(stype)
1866            r = stfiller(tbl)
1867            rx = rcParams['scantable.reference']
1868            r._setreferenceexpr(rx)
1869            msg = "Importing %s..." % (name)
1870            asaplog.push(msg, False)
1871            print_log()
1872            r._open(name, -1, -1)
1873            r._read()
1874            if average:
1875                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1876            if not first:
1877                tbl = self._math._merge([self, tbl])
1878            Scantable.__init__(self, tbl)
1879            r._close()
1880            del r, tbl
1881            first = False
1882        if unit is not None:
1883            self.set_fluxunit(unit)
1884        self.set_freqframe(rcParams['scantable.freqframe'])
1885
1886    def __getitem__(self, key):
1887        if key < 0:
1888            key += self.nrow()
1889        if key >= self.nrow():
1890            raise IndexError("Row index out of range.")
1891        return self._getspectrum(key)
1892
1893    def __setitem__(self, key, value):
1894        if key < 0:
1895            key += self.nrow()
1896        if key >= self.nrow():
1897            raise IndexError("Row index out of range.")
1898        if not hasattr(value, "__len__") or \
1899                len(value) != self.nchan(self.getif(key)):
1900            raise ValueError("Spectrum length doesn't match.")
1901        return self._setspectrum(value, key)
1902
1903    def __len__(self):
1904        return self.nrow()
1905
1906    def __iter__(self):
1907        for i in range(len(self)):
1908            yield self[i]
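    # The sequence protocol above gives direct access to spectra, e.g.
    # (illustrative only):
    #     first = scan[0]      # spectrum of row 0 as a list of floats
    #     for sp in scan:      # iterate over the spectra of all rows
    #         print len(sp)
    #     scan[0] = first      # write a matching-length spectrum back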