source: trunk/python/scantable.py @ 1593

Last change on this file since 1593 was 1593, checked in by Malte Marquarding, 15 years ago

tidy up

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 69.5 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, print_log_dec
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13    @print_log_dec
14    def __init__(self, filename, average=None, unit=None, parallactify=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32            parallactify: Indicate that the data have been parallactified.
33                          Default is taken from the rc file.
34        """
35        if average is None:
36            average = rcParams['scantable.autoaverage']
37        parallactify = parallactify or rcParams['scantable.parallactify']
38        varlist = vars()
39        from asap._asap import stmath
40        self._math = stmath()
41        if isinstance(filename, Scantable):
42            Scantable.__init__(self, filename)
43        else:
44            if isinstance(filename, str):# or \
45#                (isinstance(filename, list) or isinstance(filename, tuple)) \
46#                  and isinstance(filename[-1], str):
47                import os.path
48                filename = os.path.expandvars(filename)
49                filename = os.path.expanduser(filename)
50                if not os.path.exists(filename):
51                    s = "File '%s' not found." % (filename)
52                    if rcParams['verbose']:
53                        asaplog.push(s)
54                        #print asaplog.pop().strip()
55                        return
56                    raise IOError(s)
57                if os.path.isdir(filename) \
58                    and not os.path.exists(filename+'/table.f1'):
59                    # crude check if asap table
60                    if os.path.exists(filename+'/table.info'):
61                        ondisk = rcParams['scantable.storage'] == 'disk'
62                        Scantable.__init__(self, filename, ondisk)
63                        if unit is not None:
64                            self.set_fluxunit(unit)
65                        self.set_freqframe(rcParams['scantable.freqframe'])
66                    else:
67                        msg = "The given file '%s'is not a valid " \
68                              "asap table." % (filename)
69                        if rcParams['verbose']:
70                            print msg
71                            return
72                        else:
73                            raise IOError(msg)
74                else:
75                    self._fill([filename], unit, average)
76            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
77                  and isinstance(filename[-1], str):
78                self._fill(filename, unit, average)
79        self.parallactify(parallactify)
80        self._add_history("scantable", varlist)
81
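    # Usage sketch (comments only, not part of the class): constructing a
    # scantable, assuming a hypothetical RPFITS file 'obs.rpf' and the usual
    # rcParams defaults.
    #
    #   from asap import scantable
    #   scan = scantable('obs.rpf', average=True)   # read file, auto-average integrations
    #   ref  = scantable(scan)                      # [advanced] reference an existing scantable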
82    @print_log_dec
83    def save(self, name=None, format=None, overwrite=False):
84        """
85        Store the scantable on disk. This can be an asap (aips++) Table,
86        SDFITS or MS2 format.
87        Parameters:
88            name:        the name of the outputfile. For format "ASCII"
89                         this is the root file name (data in 'name'.txt
90                         and header in 'name'_header.txt)
91            format:      an optional file format. Default is ASAP.
92                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
93                                       'SDFITS' (save as SDFITS file)
94                                       'ASCII' (saves as ascii text file)
95                                       'MS2' (saves as an aips++
96                                              MeasurementSet V2)
97                                       'FITS' (save as image FITS - not
98                                               readable by class)
99                                       'CLASS' (save as FITS readable by CLASS)
100            overwrite:   If the file should be overwritten if it exists.
101                         The default False is to return with warning
102                         without writing the output. USE WITH CARE.
103        Example:
104            scan.save('myscan.asap')
105            scan.save('myscan.sdfits', 'SDFITS')
106        """
107        from os import path
108        format = format or rcParams['scantable.save']
109        suffix = '.'+format.lower()
110        if name is None or name == "":
111            name = 'scantable'+suffix
112            msg = "No filename given. Using default name %s..." % name
113            asaplog.push(msg)
114        name = path.expandvars(name)
115        if path.isfile(name) or path.isdir(name):
116            if not overwrite:
117                msg = "File %s exists." % name
118                if rcParams['verbose']:
119                    print msg
120                    return
121                else:
122                    raise IOError(msg)
123        format2 = format.upper()
124        if format2 == 'ASAP':
125            self._save(name)
126        else:
127            from asap._asap import stwriter as stw
128            writer = stw(format2)
129            writer.write(self, name)
130        return
131
132    def copy(self):
133        """
134        Return a copy of this scantable.
135        Note:
136            This makes a full (deep) copy. scan2 = scan1 makes a reference.
137        Parameters:
138            none
139        Example:
140            copiedscan = scan.copy()
141        """
142        sd = scantable(Scantable._copy(self))
143        return sd
144
145    def drop_scan(self, scanid=None):
146        """
147        Return a new scantable where the specified scan number(s) has(have)
148        been dropped.
149        Parameters:
150            scanid:    a (list of) scan number(s)
151        """
152        from asap import _is_sequence_or_number as _is_valid
153        from asap import _to_list
154        from asap import unique
155        if not _is_valid(scanid):
156            if rcParams['verbose']:
157                print "Please specify a scanno to drop from the scantable"
158                return
159            else:
160                raise RuntimeError("No scan given")
161        try:
162            scanid = _to_list(scanid)
163            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
164            for sid in scanid: allscans.remove(sid)
165            if len(allscans) == 0:
166                raise ValueError("Can't remove all scans")
167        except ValueError:
168            if rcParams['verbose']:
169                print "Couldn't find any match."
170                return
171            else: raise
172        try:
173            bsel = self.get_selection()
174            sel = selector(scans=allscans)
175            self.set_selection(bsel+sel)
176            scopy = self._copy()
177            self.set_selection(bsel)
178            return scantable(scopy)
179        except RuntimeError:
180            if rcParams['verbose']:
181                print "Couldn't find any match."
182            else:
183                raise
184
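    # Usage sketch (comments only): dropping scans by number, assuming a
    # scantable instance called 'scan' that contains scans 0 and 1.
    #
    #   trimmed = scan.drop_scan(0)         # drop a single scan
    #   trimmed = scan.drop_scan([0, 1])    # drop several scans at once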
185    def get_scan(self, scanid=None):
186        """
187        Return a specific scan (by scanno) or collection of scans (by
188        source name) in a new scantable.
189        Note:
190            See scantable.drop_scan() for the inverse operation.
191        Parameters:
192            scanid:    a (list of) scanno or a source name, unix-style
193                       patterns are accepted for source name matching, e.g.
194                       '*_R' gets all 'ref' scans
195        Example:
196            # get all scans containing the source '323p459'
197            newscan = scan.get_scan('323p459')
198            # get all 'off' scans
199            refscans = scan.get_scan('*_R')
200            # get a subset of scans by scanno (as listed in scan.summary())
201            newscan = scan.get_scan([0, 2, 7, 10])
202        """
203        if scanid is None:
204            if rcParams['verbose']:
205                print "Please specify a scan no or name to " \
206                      "retrieve from the scantable"
207                return
208            else:
209                raise RuntimeError("No scan given")
210
211        try:
212            bsel = self.get_selection()
213            sel = selector()
214            if type(scanid) is str:
215                sel.set_name(scanid)
216                self.set_selection(bsel+sel)
217                scopy = self._copy()
218                self.set_selection(bsel)
219                return scantable(scopy)
220            elif type(scanid) is int:
221                sel.set_scans([scanid])
222                self.set_selection(bsel+sel)
223                scopy = self._copy()
224                self.set_selection(bsel)
225                return scantable(scopy)
226            elif type(scanid) is list:
227                sel.set_scans(scanid)
228                self.set_selection(bsel+sel)
229                scopy = self._copy()
230                self.set_selection(bsel)
231                return scantable(scopy)
232            else:
233                msg = "Illegal scanid type, use 'int' or 'list' if ints."
234                if rcParams['verbose']:
235                    print msg
236                else:
237                    raise TypeError(msg)
238        except RuntimeError:
239            if rcParams['verbose']: print "Couldn't find any match."
240            else: raise
241
242    def __str__(self):
243        return Scantable._summary(self, True)
244
245    def summary(self, filename=None):
246        """
247        Print a summary of the contents of this scantable.
248        Parameters:
249            filename:    the name of a file to write the output to
250                         Default - no file output
251            verbose:     print extra info such as the frequency table
252                         The default (False) is taken from .asaprc
253        """
254        info = Scantable._summary(self, True)
255        if filename is not None:
256            if filename == "":
257                filename = 'scantable_summary.txt'
258            from os.path import expandvars, isdir
259            filename = expandvars(filename)
260            if not isdir(filename):
261                data = open(filename, 'w')
262                data.write(info)
263                data.close()
264            else:
265                msg = "Illegal file name '%s'." % (filename)
266                if rcParams['verbose']:
267                    print msg
268                else:
269                    raise IOError(msg)
270        if rcParams['verbose']:
271            try:
272                from IPython.genutils import page as pager
273            except ImportError:
274                from pydoc import pager
275            pager(info)
276        else:
277            return info
278
279    def get_spectrum(self, rowno):
280        """Return the spectrum for the current row in the scantable as a list.
281        Parameters:
282             rowno:   the row number to retrieve the spectrum from
283        """
284        return self._getspectrum(rowno)
285
286    def get_mask(self, rowno):
287        """Return the mask for the current row in the scantable as a list.
288        Parameters:
289             rowno:   the row number to retrieve the mask from
290        """
291        return self._getmask(rowno)
292
293    def set_spectrum(self, spec, rowno):
294        """Return the spectrum for the current row in the scantable as a list.
295        Parameters:
296             spec:   the spectrum
297             rowno:    the row number to set the spectrum for
298        """
299        assert(len(spec) == self.nchan())
300        return self._setspectrum(spec, rowno)
301
302    def get_selection(self):
303        """
304        Get the selection object currently set on this scantable.
305        Parameters:
306            none
307        Example:
308            sel = scan.get_selection()
309            sel.set_ifs(0)              # select IF 0
310            scan.set_selection(sel)     # apply modified selection
311        """
312        return selector(self._getselection())
313
314    def set_selection(self, selection=None, **kw):
315        """
316        Select a subset of the data. All following operations on this scantable
317        are only applied to this selection.
318        Parameters:
319            selection:    a selector object (default unset the selection)
320        Examples:
321            sel = selector()         # create a selection object
322            sel.set_scans([0, 3])    # select SCANNO 0 and 3
323            scan.set_selection(sel)  # set the selection
324            scan.summary()           # will only print summary of scanno 0 and 3
325            scan.set_selection()     # unset the selection
326        """
327        if selection is None:
328            # reset
329            if len(kw) == 0:
330                selection = selector()
331            else:
332                # try keywords
333                for k in kw:
334                    if k not in selector.fields:
335                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
336                selection = selector(**kw)
337        self._setselection(selection)
338
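    # Usage sketch (comments only): set_selection() also accepts selector
    # keywords directly, validated against selector.fields. The keyword names
    # below ('scans', 'ifs') are assumptions; check selector.fields for the
    # exact list supported by your ASAP version.
    #
    #   scan.set_selection(scans=[0, 3], ifs=[0])   # shorthand for building a selector
    #   scan.set_selection()                        # unset the selection again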
339    def stats(self, stat='stddev', mask=None):
340        """
341        Determine the specified statistic of the current beam/if/pol
342        Takes a 'mask' as an optional parameter to specify which
343        channels should be excluded.
344        Parameters:
345            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
346                     'var', 'stddev', 'avdev', 'rms', 'median'
347            mask:    an optional mask specifying where the statistic
348                     should be determined.
349        Example:
350            scan.set_unit('channel')
351            msk = scan.create_mask([100, 200], [500, 600])
352            scan.stats(stat='mean', mask=msk)
353        """
354        mask = mask or []
355        if not self._check_ifs():
356            raise ValueError("Cannot apply mask as the IFs have different "
357                             "number of channels. Please use setselection() "
358                             "to select individual IFs")
359
360        statvals = self._math._stats(self, mask, stat)
361        def cb(i):
362            return statvals[i]
363
364        return self._row_callback(cb, stat)
365
366    def stddev(self, mask=None):
367        """
368        Determine the standard deviation of the current beam/if/pol
369        Takes a 'mask' as an optional parameter to specify which
370        channels should be excluded.
371        Parameters:
372            mask:    an optional mask specifying where the standard
373                     deviation should be determined.
374
375        Example:
376            scan.set_unit('channel')
377            msk = scan.create_mask([100, 200], [500, 600])
378            scan.stddev(mask=msk)
379        """
380        return self.stats(stat='stddev', mask=mask)
381
382
383    def get_column_names(self):
384        """
385        Return a list of column names, which can be used for selection.
386        """
387        return list(Scantable.get_column_names(self))
388
389    def get_tsys(self):
390        """
391        Return the System temperatures.
392        Returns:
393            a list of Tsys values for the current selection
394        """
395
396        return self._row_callback(self._gettsys, "Tsys")
397
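    # Usage sketch (comments only): Tsys values for the current selection,
    # assuming a scantable instance called 'scan'.
    #
    #   tsys = scan.get_tsys()    # one value per row (also printed when verbose)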
398    def _row_callback(self, callback, label):
399        out = ""
400        outvec = []
401        sep = '-'*50
402        for i in range(self.nrow()):
403            tm = self._gettime(i)
404            src = self._getsourcename(i)
405            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
406            out += 'Time[%s]:\n' % (tm)
407            if self.nbeam(-1) > 1:
408                out +=  ' Beam[%d] ' % (self.getbeam(i))
409            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
410            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
411            outvec.append(callback(i))
412            out += '= %3.3f\n' % (outvec[i])
413            out +=  sep+'\n'
414        if rcParams['verbose']:
415            print sep
416            print " %s" % (label)
417            print sep
418            print out
419        return outvec
420
421    def _get_column(self, callback, row=-1):
422        """
423        """
424        if row == -1:
425            return [callback(i) for i in range(self.nrow())]
426        else:
427            if  0 <= row < self.nrow():
428                return callback(row)
429
430
431    def get_time(self, row=-1, asdatetime=False):
432        """
433        Get a list of time stamps for the observations.
434        Return a datetime object for each integration time stamp in the scantable.
435        Parameters:
436            row:          row no of integration. Default -1 return all rows
437            asdatetime:   return values as datetime objects rather than strings
438        Example:
439            none
440        """
441        from time import strptime
442        from datetime import datetime
443        times = self._get_column(self._gettime, row)
444        if not asdatetime:
445            return times
446        format = "%Y/%m/%d/%H:%M:%S"
447        if isinstance(times, list):
448            return [datetime(*strptime(i, format)[:6]) for i in times]
449        else:
450            return datetime(*strptime(times, format)[:6])
451
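    # Usage sketch (comments only): time stamps as strings or datetime objects,
    # assuming a scantable instance called 'scan'.
    #
    #   stamps = scan.get_time()                    # all rows, as strings
    #   t0 = scan.get_time(0, asdatetime=True)      # row 0, as datetime.datetime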
452
453    def get_inttime(self, row=-1):
454        """
455        Get a list of integration times for the observations.
456        Return a time in seconds for each integration in the scantable.
457        Parameters:
458            row:    row no of integration. Default -1 return all rows.
459        Example:
460            none
461        """
462        return self._get_column(self._getinttime, row)
463
464
465    def get_sourcename(self, row=-1):
466        """
467        Get a list of source names for the observations.
468        Return a string for each integration in the scantable.
469        Parameters:
470            row:    row no of integration. Default -1 return all rows.
471        Example:
472            none
473        """
474        return self._get_column(self._getsourcename, row)
475
476    def get_elevation(self, row=-1):
477        """
478        Get a list of elevations for the observations.
479        Return a float for each integration in the scantable.
480        Parameters:
481            row:    row no of integration. Default -1 return all rows.
482        Example:
483            none
484        """
485        return self._get_column(self._getelevation, row)
486
487    def get_azimuth(self, row=-1):
488        """
489        Get a list of azimuths for the observations.
490        Return a float for each integration in the scantable.
491        Parameters:
492            row:    row no of integration. Default -1 return all rows.
493        Example:
494            none
495        """
496        return self._get_column(self._getazimuth, row)
497
498    def get_parangle(self, row=-1):
499        """
500        Get a list of parallactic angles for the observations.
501        Return a float for each integration in the scantable.
502        Parameters:
503            row:    row no of integration. Default -1 return all rows.
504        Example:
505            none
506        """
507        return self._get_column(self._getparangle, row)
508
509    def get_direction(self, row=-1):
510        """
511        Get a list of Positions on the sky (direction) for the observations.
512        Return a float for each integration in the scantable.
513        Parameters:
514            row:    row no of integration. Default -1 return all rows
515        Example:
516            none
517        """
518        return self._get_column(self._getdirection, row)
519
520    def get_directionval(self, row=-1):
521        """
522        Get a list of Positions on the sky (direction) for the observations.
523        Return a float for each integration in the scantable.
524        Parameters:
525            row:    row no of integration. Default -1 return all rows
526        Example:
527            none
528        """
529        return self._get_column(self._getdirectionvec, row)
530
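    # Usage sketch (comments only): the per-row metadata accessors above share
    # the same convention (row=-1 returns all rows), assuming a scantable
    # instance called 'scan'.
    #
    #   el  = scan.get_elevation()     # list, one value per row
    #   az0 = scan.get_azimuth(0)      # row 0 only
    #   src = scan.get_sourcename()    # list of source names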
531
532    def set_unit(self, unit='channel'):
533        """
534        Set the unit for all following operations on this scantable
535        Parameters:
536            unit:    optional unit, default is 'channel'
537                     one of '*Hz', 'km/s', 'channel', ''
538        """
539        varlist = vars()
540        if unit in ['', 'pixel', 'channel']:
541            unit = ''
542        inf = list(self._getcoordinfo())
543        inf[0] = unit
544        self._setcoordinfo(inf)
545        self._add_history("set_unit", varlist)
546
547    @print_log_dec
548    def set_instrument(self, instr):
549        """
550        Set the instrument for subsequent processing.
551        Parameters:
552            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
553                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
554        """
555        self._setInstrument(instr)
556        self._add_history("set_instument", vars())
557
558    @print_log_dec
559    def set_feedtype(self, feedtype):
560        """
561        Overwrite the feed type, which might not be set correctly.
562        Parameters:
563            feedtype:     'linear' or 'circular'
564        """
565        self._setfeedtype(feedtype)
566        self._add_history("set_feedtype", vars())
567
568    @print_log_dec
569    def set_doppler(self, doppler='RADIO'):
570        """
571        Set the doppler for all following operations on this scantable.
572        Parameters:
573            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
574        """
575        varlist = vars()
576        inf = list(self._getcoordinfo())
577        inf[2] = doppler
578        self._setcoordinfo(inf)
579        self._add_history("set_doppler", vars())
580
581    @print_log_dec
582    def set_freqframe(self, frame=None):
583        """
584        Set the frame type of the Spectral Axis.
585        Parameters:
586            frame:   an optional frame type, default 'LSRK'. Valid frames are:
587                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
588                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
589        Examples:
590            scan.set_freqframe('BARY')
591        """
592        frame = frame or rcParams['scantable.freqframe']
593        varlist = vars()
594        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
595                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
596
597        if frame in valid:
598            inf = list(self._getcoordinfo())
599            inf[1] = frame
600            self._setcoordinfo(inf)
601            self._add_history("set_freqframe", varlist)
602        else:
603            msg  = "Please specify a valid freq type. Valid types are:\n", valid
604            if rcParams['verbose']:
605                print msg
606            else:
607                raise TypeError(msg)
608
609    def set_dirframe(self, frame=""):
610        """
611        Set the frame type of the Direction on the sky.
612        Parameters:
613            frame:   an optional frame type, default ''. Valid frames are:
614                     'J2000', 'B1950', 'GALACTIC'
615        Examples:
616            scan.set_dirframe('GALACTIC')
617        """
618        varlist = vars()
619        try:
620            Scantable.set_dirframe(self, frame)
621        except RuntimeError, msg:
622            if rcParams['verbose']:
623                print msg
624            else:
625                raise
626        self._add_history("set_dirframe", varlist)
627
628    def get_unit(self):
629        """
630        Get the default unit set in this scantable
631        Returns:
632            A unit string
633        """
634        inf = self._getcoordinfo()
635        unit = inf[0]
636        if unit == '': unit = 'channel'
637        return unit
638
639    def get_abcissa(self, rowno=0):
640        """
641        Get the abcissa in the current coordinate setup for the currently
642        selected Beam/IF/Pol
643        Parameters:
644            rowno:    an optional row number in the scantable. Default is the
645                      first row, i.e. rowno=0
646        Returns:
647            The abcissa values and the format string (as a dictionary)
648        """
649        abc = self._getabcissa(rowno)
650        lbl = self._getabcissalabel(rowno)
651        return abc, lbl
652
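    # Usage sketch (comments only): abscissa values and axis label for row 0,
    # e.g. for plotting, assuming a scantable instance called 'scan'.
    #
    #   abc, lbl = scan.get_abcissa(rowno=0)
    #   spec = scan.get_spectrum(0)    # ordinate values matching abc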
653    def flag(self, mask=None):
654        """
655        Flag the selected data using an optional channel mask.
656        Parameters:
657            mask:   an optional channel mask, created with create_mask. Default
658                    (no mask) is all channels.
659        """
660        varlist = vars()
661        mask = mask or []
662        try:
663            self._flag(mask)
664        except RuntimeError, msg:
665            if rcParams['verbose']:
666                print msg
667                return
668            else: raise
669        self._add_history("flag", varlist)
670
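    # Usage sketch (comments only): flagging a channel range, assuming a
    # scantable instance called 'scan' working in channel units.
    #
    #   scan.set_unit('channel')
    #   msk = scan.create_mask([1000, 1100])   # window to operate on
    #   scan.flag(mask=msk)                    # flag only that window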
671    @print_log_dec
672    def lag_flag(self, start, end, unit="MHz", insitu=None):
673        """
674        Flag the data in 'lag' space by providing a frequency to remove.
675        Flagged data in the scantable gets interpolated over the region.
676        No taper is applied.
677        Parameters:
678            start:    the start frequency (really a period within the
679                      bandwidth)  or period to remove
680            end:      the end frequency or period to remove
681            unit:     the frequency unit (default "MHz") or "" for
682                      explicit lag channels
683        Notes:
684            It is recommended to flag edges of the band or strong
685            signals beforehand.
686        """
687        if insitu is None: insitu = rcParams['insitu']
688        self._math._setinsitu(insitu)
689        varlist = vars()
690        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
691        if not (unit == "" or base.has_key(unit)):
692            raise ValueError("%s is not a valid unit." % unit)
693        try:
694            if unit == "":
695                s = scantable(self._math._lag_flag(self, start, end, "lags"))
696            else:
697                s = scantable(self._math._lag_flag(self, start*base[unit],
698                                                   end*base[unit], "frequency"))
699        except RuntimeError, msg:
700            if rcParams['verbose']:
701                print msg
702                return
703            else: raise
704        s._add_history("lag_flag", varlist)
705        if insitu:
706            self._assign(s)
707        else:
708            return s
709
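    # Usage sketch (comments only): removing a periodic ripple in lag space.
    # The frequency range below is an arbitrary example value; a scantable
    # instance called 'scan' (with band edges already flagged) is assumed.
    #
    #   cleaned = scan.lag_flag(0.5, 1.5, unit="MHz", insitu=False)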
710    @print_log_dec
711    def create_mask(self, *args, **kwargs):
712        """
713        Compute and return a mask based on [min, max] windows.
714        The specified windows are to be INCLUDED, when the mask is
715        applied.
716        Parameters:
717            [min, max], [min2, max2], ...
718                Pairs of start/end points (inclusive) specifying the regions
719                to be masked
720            invert:     optional argument. If specified as True,
721                        return an inverted mask, i.e. the regions
722                        specified are EXCLUDED
723            row:        create the mask using the specified row for
724                        unit conversions, default is row=0
725                        only necessary if frequency varies over rows.
726        Example:
727            scan.set_unit('channel')
728            a)
729            msk = scan.create_mask([400, 500], [800, 900])
730            # masks everything outside 400 and 500
731            # and 800 and 900 in the unit 'channel'
732
733            b)
734            msk = scan.create_mask([400, 500], [800, 900], invert=True)
735            # masks the regions between 400 and 500
736            # and 800 and 900 in the unit 'channel'
737            c)
738            # mask only channel 400
739            msk = scan.create_mask([400])
740        """
741        row = kwargs.get("row", 0)
742        data = self._getabcissa(row)
743        u = self._getcoordinfo()[0]
744        if rcParams['verbose']:
745            if u == "": u = "channel"
746            msg = "The current mask window unit is %s" % u
747            i = self._check_ifs()
748            if not i:
749                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
750            asaplog.push(msg)
751        n = self.nchan()
752        msk = _n_bools(n, False)
753        # test if args is a 'list' or a 'normal *args - UGLY!!!
754
755        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
756             and args or args[0]
757        for window in ws:
758            if len(window) == 1:
759                window = [window[0], window[0]]
760            if len(window) == 0 or  len(window) > 2:
761                raise ValueError("A window needs to be defined as [start(, end)]")
762            if window[0] > window[1]:
763                tmp = window[0]
764                window[0] = window[1]
765                window[1] = tmp
766            for i in range(n):
767                if data[i] >= window[0] and data[i] <= window[1]:
768                    msk[i] = True
769        if kwargs.has_key('invert'):
770            if kwargs.get('invert'):
771                msk = mask_not(msk)
772        return msk
773
774    def get_restfreqs(self):
775        """
776        Get the restfrequency(s) stored in this scantable.
777        The return value(s) are always of unit 'Hz'
778        Parameters:
779            none
780        Returns:
781            a list of doubles
782        """
783        return list(self._getrestfreqs())
784
785
786    def set_restfreqs(self, freqs=None, unit='Hz'):
787        """
788        Set or replace the rest frequency.
789        If the 'freqs' argument holds a scalar,
790        then that rest frequency will be applied to all the selected
791        data.  If the 'freqs' argument holds
792        a vector, then it MUST be of equal or smaller length than
793        the number of IFs (and the available restfrequencies will be
794        replaced by this vector).  In this case, *all* data have
795        the restfrequency set per IF according
796        to the corresponding value you give in the 'freqs' vector.
797        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
798        IF 1 gets restfreq 2e9.
799        You can also specify the frequencies via a linecatalog.
800
801        Parameters:
802            freqs:   list of rest frequency values or string identifiers
803            unit:    unit for rest frequency (default 'Hz')
804
805        Example:
806            # set the given restfrequency for the whole table
807            scan.set_restfreqs(freqs=1.4e9)
808            # If the number of IFs in the data is >= 2, IF0 gets the first
809            # value IF1 the second...
810            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
811            #set the given restfrequency for the whole table (by name)
812            scan.set_restfreqs(freqs="OH1667")
813
814        Note:
815            To do more sophisticated rest frequency setting, e.g. on a
816            source and IF basis, use scantable.set_selection() before using
817            this function.
818            # provided your scantable is called scan
819            selection = selector()
820            selection.set_name("ORION*")
821            selection.set_ifs([1])
822            scan.set_selection(selection)
823            scan.set_restfreqs(freqs=86.6e9)
824
825        """
826        varlist = vars()
827        from asap import linecatalog
828        # simple  value
829        if isinstance(freqs, int) or isinstance(freqs, float):
830            self._setrestfreqs(freqs, "",unit)
831        # list of values
832        elif isinstance(freqs, list) or isinstance(freqs, tuple):
833            # list values are scalars
834            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
835                sel = selector()
836                savesel = self._getselection()
837                iflist = self.getifnos()
838                for i in xrange(len(freqs)):
839                    sel.set_ifs(iflist[i])
840                    self._setselection(sel)
841                    self._setrestfreqs(freqs[i], "",unit)
842                self._setselection(savesel)
843            # list values are dicts with 'value' and 'name' keys
844            elif isinstance(freqs[-1], dict):
845                sel = selector()
846                savesel = self._getselection()
847                iflist = self.getifnos()
848                for i in xrange(len(freqs)):
849                    sel.set_ifs(iflist[i])
850                    self._setselection(sel)
851                    self._setrestfreqs(freqs[i]["value"],
852                                       freqs[i]["name"], "MHz")
853                self._setselection(savesel)
854        # freqs are to be taken from a linecatalog
855        elif isinstance(freqs, linecatalog):
856            sel = selector()
857            savesel = self._getselection()
858            iflist = self.getifnos()
859            for i in xrange(freqs.nrow()):
860                sel.set_ifs(iflist[i])
861                self._setselection(sel)
862                self._setrestfreqs(freqs.get_frequency(i),
863                                   freqs.get_name(i), "MHz")
864                # ensure that we are not iterating past nIF
865                if i == self.nif()-1: break
866            self._setselection(savesel)
867        else:
868            return
869        self._add_history("set_restfreqs", varlist)
870
871    def shift_refpix(self, delta):
872        """
873        Shift the reference pixel of the Spectral Coordinate by an
874        integer amount.
875        Parameters:
876            delta:   the amount to shift by
877        Note:
878            Be careful using this with broadband data.
879        """
880        Scantable.shift(self, delta)
881
882    def history(self, filename=None):
883        """
884        Print the history. Optionally to a file.
885        Parameters:
886            filename:    The name  of the file to save the history to.
887        """
888        hist = list(self._gethistory())
889        out = "-"*80
890        for h in hist:
891            if h.startswith("---"):
892                out += "\n"+h
893            else:
894                items = h.split("##")
895                date = items[0]
896                func = items[1]
897                items = items[2:]
898                out += "\n"+date+"\n"
899                out += "Function: %s\n  Parameters:" % (func)
900                for i in items:
901                    s = i.split("=")
902                    out += "\n   %s = %s" % (s[0], s[1])
903                out += "\n"+"-"*80
904        if filename is not None:
905            if filename == "":
906                filename = 'scantable_history.txt'
907            import os
908            filename = os.path.expandvars(os.path.expanduser(filename))
909            if not os.path.isdir(filename):
910                data = open(filename, 'w')
911                data.write(out)
912                data.close()
913            else:
914                msg = "Illegal file name '%s'." % (filename)
915                if rcParams['verbose']:
916                    print msg
917                else:
918                    raise IOError(msg)
919        if rcParams['verbose']:
920            try:
921                from IPython.genutils import page as pager
922            except ImportError:
923                from pydoc import pager
924            pager(out)
925        else:
926            return out
927        return
928    #
929    # Maths business
930    #
931    @print_log_dec
932    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
933        """
934        Return the (time) weighted average of a scan.
935        Note:
936            in channels only - align if necessary
937        Parameters:
938            mask:     an optional mask (only used for 'var' and 'tsys'
939                      weighting)
940            scanav:   True averages each scan separately
941                      False (default) averages all scans together,
942            weight:   Weighting scheme.
943                      'none'     (plain mean, no weighting)
944                      'var'      (1/var(spec) weighted)
945                      'tsys'     (1/Tsys**2 weighted)
946                      'tint'     (integration time weighted)
947                      'tintsys'  (Tint/Tsys**2)
948                      'median'   (median averaging)
949                      The default is 'tint'
950            align:    align the spectra in velocity before averaging. It takes
951                      the time of the first spectrum as reference time.
952        Example:
953            # time average the scantable without using a mask
954            newscan = scan.average_time()
955        """
956        varlist = vars()
957        weight = weight or 'TINT'
958        mask = mask or ()
959        scanav = (scanav and 'SCAN') or 'NONE'
960        scan = (self, )
961        try:
962            if align:
963                scan = (self.freq_align(insitu=False), )
964            s = None
965            if weight.upper() == 'MEDIAN':
966                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
967                                                         scanav))
968            else:
969                s = scantable(self._math._average(scan, mask, weight.upper(),
970                              scanav))
971        except RuntimeError, msg:
972            if rcParams['verbose']:
973                print msg
974                return
975            else: raise
976        s._add_history("average_time", varlist)
977        return s
978
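    # Usage sketch (comments only): common averaging variants, assuming a
    # scantable instance called 'scan'.
    #
    #   av_all  = scan.average_time()                  # all scans together, 'tint' weights
    #   av_scan = scan.average_time(scanav=True)       # one average per scan
    #   av_med  = scan.average_time(weight='median')   # median averaging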
979    @print_log_dec
980    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
981        """
982        Return a scan where all spectra are converted to either
983        Jansky or Kelvin depending upon the flux units of the scan table.
984        By default the function tries to look the values up internally.
985        If it can't find them (or if you want to over-ride), you must
986        specify EITHER jyperk OR eta (and D which it will try to look up
987        also if you don't set it). jyperk takes precedence if you set both.
988        Parameters:
989            jyperk:      the Jy / K conversion factor
990            eta:         the aperture efficiency
991            d:           the geometric diameter (metres)
992            insitu:      if False a new scantable is returned.
993                         Otherwise, the scaling is done in-situ
994                         The default is taken from .asaprc (False)
995        """
996        if insitu is None: insitu = rcParams['insitu']
997        self._math._setinsitu(insitu)
998        varlist = vars()
999        jyperk = jyperk or -1.0
1000        d = d or -1.0
1001        eta = eta or -1.0
1002        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1003        s._add_history("convert_flux", varlist)
1004        if insitu: self._assign(s)
1005        else: return s
1006
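    # Usage sketch (comments only): flux conversion with assumed, arbitrary
    # example values for jyperk, eta and d; 'scan' is a scantable instance.
    #
    #   jyscan = scan.convert_flux(jyperk=12.7, insitu=False)
    #   jyscan = scan.convert_flux(eta=0.58, d=64.0, insitu=False)   # alternative to jyperk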
1007    @print_log_dec
1008    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1009        """
1010        Return a scan after applying a gain-elevation correction.
1011        The correction can be made via either a polynomial or a
1012        table-based interpolation (and extrapolation if necessary).
1013        You specify polynomial coefficients, an ascii table or neither.
1014        If you specify neither, then a polynomial correction will be made
1015        with built in coefficients known for certain telescopes (an error
1016        will occur if the instrument is not known).
1017        The data and Tsys are *divided* by the scaling factors.
1018        Parameters:
1019            poly:        Polynomial coefficients (default None) to compute a
1020                         gain-elevation correction as a function of
1021                         elevation (in degrees).
1022            filename:    The name of an ascii file holding correction factors.
1023                         The first row of the ascii file must give the column
1024                         names and these MUST include columns
1025                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1026                         by this) somewhere.
1027                         The second row must give the data type of the
1028                         column. Use 'R' for Real and 'I' for Integer.
1029                         An example file would be
1030                         (actual factors are arbitrary) :
1031
1032                         TIME ELEVATION FACTOR
1033                         R R R
1034                         0.1 0 0.8
1035                         0.2 20 0.85
1036                         0.3 40 0.9
1037                         0.4 60 0.85
1038                         0.5 80 0.8
1039                         0.6 90 0.75
1040            method:      Interpolation method when correcting from a table.
1041                         Values are  "nearest", "linear" (default), "cubic"
1042                         and "spline"
1043            insitu:      if False a new scantable is returned.
1044                         Otherwise, the scaling is done in-situ
1045                         The default is taken from .asaprc (False)
1046        """
1047
1048        if insitu is None: insitu = rcParams['insitu']
1049        self._math._setinsitu(insitu)
1050        varlist = vars()
1051        poly = poly or ()
1052        from os.path import expandvars
1053        filename = expandvars(filename)
1054        s = scantable(self._math._gainel(self, poly, filename, method))
1055        s._add_history("gain_el", varlist)
1056        if insitu:
1057            self._assign(s)
1058        else:
1059            return s
1060
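    # Usage sketch (comments only): gain-elevation correction, with a made-up
    # coefficient list and a hypothetical ascii table 'gainel.txt'; 'scan' is
    # a scantable instance.
    #
    #   corr = scan.gain_el(poly=[1.0, -0.001, 0.0], insitu=False)       # polynomial form
    #   corr = scan.gain_el(filename='gainel.txt', method='linear',
    #                       insitu=False)                                # table form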
1061    @print_log_dec
1062    def freq_align(self, reftime=None, method='cubic', insitu=None):
1063        """
1064        Return a scan where all rows have been aligned in frequency/velocity.
1065        The alignment frequency frame (e.g. LSRK) is that set by function
1066        set_freqframe.
1067        Parameters:
1068            reftime:     reference time to align at. By default, the time of
1069                         the first row of data is used.
1070            method:      Interpolation method for regridding the spectra.
1071                         Choose from "nearest", "linear", "cubic" (default)
1072                         and "spline"
1073            insitu:      if False a new scantable is returned.
1074                         Otherwise, the scaling is done in-situ
1075                         The default is taken from .asaprc (False)
1076        """
1077        if insitu is None: insitu = rcParams["insitu"]
1078        self._math._setinsitu(insitu)
1079        varlist = vars()
1080        reftime = reftime or ""
1081        s = scantable(self._math._freq_align(self, reftime, method))
1082        s._add_history("freq_align", varlist)
1083        if insitu: self._assign(s)
1084        else: return s
1085
1086    @print_log_dec
1087    def opacity(self, tau, insitu=None):
1088        """
1089        Apply an opacity correction. The data
1090        and Tsys are multiplied by the correction factor.
1091        Parameters:
1092            tau:         Opacity from which the correction factor is
1093                         exp(tau*ZD)
1094                         where ZD is the zenith-distance
1095            insitu:      if False a new scantable is returned.
1096                         Otherwise, the scaling is done in-situ
1097                         The default is taken from .asaprc (False)
1098        """
1099        if insitu is None: insitu = rcParams['insitu']
1100        self._math._setinsitu(insitu)
1101        varlist = vars()
1102        s = scantable(self._math._opacity(self, tau))
1103        s._add_history("opacity", varlist)
1104        if insitu: self._assign(s)
1105        else: return s
1106
1107    @print_log_dec
1108    def bin(self, width=5, insitu=None):
1109        """
1110        Return a scan where all spectra have been binned up.
1111        Parameters:
1112            width:       The bin width (default=5) in pixels
1113            insitu:      if False a new scantable is returned.
1114                         Otherwise, the scaling is done in-situ
1115                         The default is taken from .asaprc (False)
1116        """
1117        if insitu is None: insitu = rcParams['insitu']
1118        self._math._setinsitu(insitu)
1119        varlist = vars()
1120        s = scantable(self._math._bin(self, width))
1121        s._add_history("bin", varlist)
1122        if insitu:
1123            self._assign(s)
1124        else:
1125            return s
1126
1127    @print_log_dec
1128    def resample(self, width=5, method='cubic', insitu=None):
1129        """
1130        Return a scan where all spectra have been binned up (resampled using interpolation).
1131
1132        Parameters:
1133            width:       The bin width (default=5) in pixels
1134            method:      Interpolation method used for the resampling.
1135                         Values are "nearest", "linear", "cubic" (default)
1136                         and "spline"
1137            insitu:      if False a new scantable is returned.
1138                         Otherwise, the scaling is done in-situ
1139                         The default is taken from .asaprc (False)
1140        """
1141        if insitu is None: insitu = rcParams['insitu']
1142        self._math._setinsitu(insitu)
1143        varlist = vars()
1144        s = scantable(self._math._resample(self, method, width))
1145        s._add_history("resample", varlist)
1146        if insitu: self._assign(s)
1147        else: return s
1148
1149    @print_log_dec
1150    def average_pol(self, mask=None, weight='none'):
1151        """
1152        Average the Polarisations together.
1153        Parameters:
1154            mask:        An optional mask defining the region, where the
1155                         averaging will be applied. The output will have all
1156                         specified points masked.
1157            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1158                         weighted), or 'tsys' (1/Tsys**2 weighted)
1159        """
1160        varlist = vars()
1161        mask = mask or ()
1162        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1163        s._add_history("average_pol", varlist)
1164        return s
1165
1166    @print_log_dec
1167    def average_beam(self, mask=None, weight='none'):
1168        """
1169        Average the Beams together.
1170        Parameters:
1171            mask:        An optional mask defining the region, where the
1172                         averaging will be applied. The output will have all
1173                         specified points masked.
1174            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1175                         weighted), or 'tsys' (1/Tsys**2 weighted)
1176        """
1177        varlist = vars()
1178        mask = mask or ()
1179        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1180        s._add_history("average_beam", varlist)
1181        return s
1182
1183    def parallactify(self, pflag):
1184        varlist = vars()
1185        self._parallactify(pflag)
1186        self._add_history("parallactify", varlist)
1187
1188    @print_log_dec
1189    def convert_pol(self, poltype=None):
1190        """
1191        Convert the data to a different polarisation type.
1192        Note that you will need cross-polarisation terms for most conversions.
1193        Parameters:
1194            poltype:    The new polarisation type. Valid types are:
1195                        "linear", "circular", "stokes" and "linpol"
1196        """
1197        varlist = vars()
1198        try:
1199            s = scantable(self._math._convertpol(self, poltype))
1200        except RuntimeError, msg:
1201            if rcParams['verbose']:
1202                print msg
1203                return
1204            else:
1205                raise
1206        s._add_history("convert_pol", varlist)
1207        return s
1208
1209    @print_log_dec
1210    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1211        """
1212        Smooth the spectrum by the specified kernel (conserving flux).
1213        Parameters:
1214            kernel:     The type of smoothing kernel. Select from
1215                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1216                        or 'poly'
1217            width:      The width of the kernel in pixels. For hanning this is
1218                        ignored, otherwise it defaults to 5 pixels.
1219                        For 'gaussian' it is the Full Width Half
1220                        Maximum. For 'boxcar' it is the full width.
1221                        For 'rmedian' and 'poly' it is the half width.
1222            order:      Optional parameter for 'poly' kernel (default is 2), to
1223                        specify the order of the polynomial. Ignored by all other
1224                        kernels.
1225            insitu:     if False a new scantable is returned.
1226                        Otherwise, the scaling is done in-situ
1227                        The default is taken from .asaprc (False)
1228        Example:
1229             none
1230        """
1231        if insitu is None: insitu = rcParams['insitu']
1232        self._math._setinsitu(insitu)
1233        varlist = vars()
1234        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1235        s._add_history("smooth", varlist)
1236        if insitu: self._assign(s)
1237        else: return s
1238
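    # Usage sketch (comments only): a few kernel choices, assuming a scantable
    # instance called 'scan'; widths are arbitrary example values.
    #
    #   sm = scan.smooth()                                         # hanning, width ignored
    #   sm = scan.smooth(kernel='boxcar', width=9, insitu=False)
    #   sm = scan.smooth(kernel='gaussian', width=7.5, insitu=False)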
1239    @print_log_dec
1240    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1241        """
1242        Return a scan which has been baselined (all rows) by a polynomial.
1243        Parameters:
1244            mask:       an optional mask
1245            order:      the order of the polynomial (default is 0)
1246            plot:       plot the fit and the residual. In this mode each
1247                        individual fit has to be approved by typing 'y'
1248                        or 'n'
1249            uselin:     use linear polynomial fit
1250            insitu:     if False a new scantable is returned.
1251                        Otherwise, the scaling is done in-situ
1252                        The default is taken from .asaprc (False)
1253        Example:
1254            # return a scan baselined by a third order polynomial,
1255            # not using a mask
1256            bscan = scan.poly_baseline(order=3)
1257        """
1258        if insitu is None: insitu = rcParams['insitu']
1259        varlist = vars()
1260        if mask is None:
1261            mask = [True for i in xrange(self.nchan(-1))]
1262        from asap.asapfitter import fitter
1263        try:
1264            f = fitter()
1265            f.set_scan(self, mask)
1266            #f.set_function(poly=order)
1267            if uselin:
1268                f.set_function(lpoly=order)
1269            else:
1270                f.set_function(poly=order)
1271            s = f.auto_fit(insitu, plot=plot)
1272            s._add_history("poly_baseline", varlist)
1273            if insitu: self._assign(s)
1274            else: return s
1275        except RuntimeError:
1276            msg = "The fit failed, possibly because it didn't converge."
1277            if rcParams['verbose']:
1278                print msg
1279                return
1280            else:
1281                raise RuntimeError(msg)
1282
1283    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1284                           threshold=3, chan_avg_limit=1, plot=False,
1285                           insitu=None):
1286        """
1287        Return a scan which has been baselined (all rows) by a polynomial.
1288        Spectral lines are detected first using linefinder and masked out
1289        to avoid them affecting the baseline solution.
1290
1291        Parameters:
1292            mask:       an optional mask retrieved from the scantable
1293            edge:       an optional number of channels to drop at
1294                        the edge of the spectrum. If only one value is
1295                        specified, the same number will be dropped from
1296                        both sides of the spectrum. Default is to keep
1297                        all channels. Nested tuples represent individual
1298                        edge selection for different IFs (whose numbers of
1299                        spectral channels can differ)
1300            order:      the order of the polynomial (default is 0)
1301            threshold:  the threshold used by line finder. It is better to
1302                        keep it large as only strong lines affect the
1303                        baseline solution.
1304            chan_avg_limit:
1305                        a maximum number of consecutive spectral channels to
1306                        average during the search of weak and broad lines.
1307                        The default is no averaging (and no search for weak
1308                        lines). If such lines can affect the fitted baseline
1309                        (e.g. a high order polynomial is fitted), increase this
1310                        parameter (usually values up to 8 are reasonable). Most
1311                        users of this method should find the default value
1312                        sufficient.
1313            plot:       plot the fit and the residual. In this mode each
1314                        individual fit has to be approved by typing 'y'
1315                        or 'n'
1316            insitu:     if False a new scantable is returned.
1317                        Otherwise, the scaling is done in-situ
1318                        The default is taken from .asaprc (False)
1319
1320        Example:
1321            scan2=scan.auto_poly_baseline(order=7)
1322        """
1323        if insitu is None: insitu = rcParams['insitu']
1324        varlist = vars()
1325        from asap.asapfitter import fitter
1326        from asap.asaplinefind import linefinder
1327        from asap import _is_sequence_or_number as _is_valid
1328
1329        # check whether edge is set up for each IF individually
1330        individualedge = False;
1331        if len(edge) > 1:
1332            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1333                individualedge = True;
1334
1335        if not _is_valid(edge, int) and not individualedge:
1336            raise ValueError, "Parameter 'edge' has to be an integer or a \
1337            pair of integers specified as a tuple. Nested tuples are allowed \
1338            to make individual selection for different IFs."
1339
1340        curedge = (0, 0)
1341        if individualedge:
1342            for edgepar in edge:
1343                if not _is_valid(edgepar, int):
1344                    raise ValueError, "Each element of the 'edge' tuple has " \
1345                                      "to be a pair of integers or an integer."
1346        else:
1347            curedge = edge
1348
1349        # setup fitter
1350        f = fitter()
1351        f.set_function(poly=order)
1352
1353        # setup line finder
1354        fl = linefinder()
1355        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1356
1357        if not insitu:
1358            workscan = self.copy()
1359        else:
1360            workscan = self
1361
1362        fl.set_scan(workscan)
1363
1364        rows = range(workscan.nrow())
1365        asaplog.push("Processing:")
1366        for r in rows:
1367            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1368                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1369                 workscan.getpol(r), workscan.getcycle(r))
1370            asaplog.push(msg, False)
1371
1372            # figure out edge parameter
1373            if individualedge:
1374                if len(edge) <= workscan.getif(r):
1375                    raise RuntimeError, "Number of edge elements appears to " \
1376                                        "be less than the number of IFs"
1377                curedge = edge[workscan.getif(r)]
1378
1379            # setup line finder
1380            fl.find_lines(r, mask, curedge)
1381            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1382                        mask_and(workscan._getmask(r), fl.get_mask()))
1383            f.fit()
1384            x = f.get_parameters()
1385            if plot:
1386                f.plot(residual=True)
1387                x = raw_input("Accept fit ( [y]/n ): ")
1388                if x.upper() == 'N':
1389                    continue
1390            workscan._setspectrum(f.fitter.getresidual(), r)
1391        if plot:
1392            f._p.unmap()
1393            f._p = None
1394        workscan._add_history("auto_poly_baseline", varlist)
1395        if insitu:
1396            self._assign(workscan)
1397        else:
1398            return workscan
1399
1400    @print_log_dec
1401    def rotate_linpolphase(self, angle):
1402        """
1403        Rotate the phase of the complex polarization O=Q+iU correlation.
1404        This is always done in situ in the raw data.  So if you call this
1405        function more than once then each call rotates the phase further.
1406        Parameters:
1407            angle:   The angle (degrees) to rotate (add) by.
1408        Examples:
1409            scan.rotate_linpolphase(2.3)
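            # a second call adds a further rotation (4.6 degrees in total)
            scan.rotate_linpolphase(2.3)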
1410        """
1411        varlist = vars()
1412        self._math._rotate_linpolphase(self, angle)
1413        self._add_history("rotate_linpolphase", varlist)
1414        return
1415
1416    @print_log_dec
1417    def rotate_xyphase(self, angle):
1418        """
1419        Rotate the phase of the XY correlation.  This is always done in situ
1420        in the data.  So if you call this function more than once
1421        then each call rotates the phase further.
1422        Parameters:
1423            angle:   The angle (degrees) to rotate (add) by.
1424        Examples:
1425            scan.rotate_xyphase(2.3)
1426        """
1427        varlist = vars()
1428        self._math._rotate_xyphase(self, angle)
1429        self._add_history("rotate_xyphase", varlist)
1430        return
1431
1432    @print_log_dec
1433    def swap_linears(self):
1434        """
1435        Swap the linear polarisations XX and YY, or more generally the first
1436        two polarisations, as this also works for circulars.
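        Example (illustrative):
            scan.swap_linears()    # exchanges the first two polarisation products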
1437        """
1438        varlist = vars()
1439        self._math._swap_linears(self)
1440        self._add_history("swap_linears", varlist)
1441        return
1442
1443    @print_log_dec
1444    def invert_phase(self):
1445        """
1446        Invert the phase of the complex polarisation
1447        """
1448        varlist = vars()
1449        self._math._invert_phase(self)
1450        self._add_history("invert_phase", varlist)
1451        return
1452
1453    @print_log_dec
1454    def add(self, offset, insitu=None):
1455        """
1456        Return a scan where all spectra have the offset added
1457        Parameters:
1458            offset:      the offset
1459            insitu:      if False a new scantable is returned.
1460                         Otherwise, the scaling is done in-situ
1461                         The default is taken from .asaprc (False)
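        Example (illustrative; assumes 'scan' is an existing scantable):
            scan2 = scan.add(0.5, insitu=False)   # add a 0.5 offset to every spectrum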
1462        """
1463        if insitu is None: insitu = rcParams['insitu']
1464        self._math._setinsitu(insitu)
1465        varlist = vars()
1466        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1467        s._add_history("add", varlist)
1468        if insitu:
1469            self._assign(s)
1470        else:
1471            return s
1472
1473    @print_log_dec
1474    def scale(self, factor, tsys=True, insitu=None):
1475        """
1476        Return a scan where all spectra are scaled by the given 'factor'
1477        Parameters:
1478            factor:      the scaling factor
1479            insitu:      if False a new scantable is returned.
1480                         Otherwise, the scaling is done in-situ
1481                         The default is taken from .asaprc (False)
1482            tsys:        if True (default) then apply the operation to Tsys
1483                         as well as the data
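        Example (illustrative; assumes 'scan' is an existing scantable):
            scan2 = scan.scale(2.0, insitu=False)               # scale data and Tsys
            scan3 = scan.scale(2.0, tsys=False, insitu=False)   # leave Tsys untouched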
1484        """
1485        if insitu is None: insitu = rcParams['insitu']
1486        self._math._setinsitu(insitu)
1487        varlist = vars()
1488        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1489        s._add_history("scale", varlist)
1490        if insitu:
1491            self._assign(s)
1492        else:
1493            return s
1494
1495    def set_sourcetype(self, match, matchtype="pattern",
1496                       sourcetype="reference"):
1497        """
1498        Set the type of the source to be a source or reference scan
1499        using the provided pattern.
1500        Parameters:
1501            match:          a Unix style pattern, regular expression or selector
1502            matchtype:      'pattern' (default) UNIX style pattern or
1503                            'regex' regular expression
1504            sourcetype:     the type of the source to use (source/reference)
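        Example (illustrative; the '*_R' source-name convention is an assumption
        about the observing setup):
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")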
1505        """
1506        varlist = vars()
1507        basesel = self.get_selection()
1508        stype = -1
1509        if sourcetype.lower().startswith("r"):
1510            stype = 1
1511        elif sourcetype.lower().startswith("s"):
1512            stype = 0
1513        else:
1514            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1515        if matchtype.lower().startswith("p"):
1516            matchtype = "pattern"
1517        elif matchtype.lower().startswith("r"):
1518            matchtype = "regex"
1519        else:
1520            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1521        sel = selector()
1522        if isinstance(match, selector):
1523            sel = match
1524        else:
1525            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1526        self.set_selection(basesel+sel)
1527        self._setsourcetype(stype)
1528        self.set_selection(basesel)
1529        self._add_history("set_sourcetype", varlist)
1530
1531    @print_log_dec
1532    def auto_quotient(self, preserve=True, mode='paired'):
1533        """
1534        This function allows you to build quotients automatically.
1535        It assumes the observation to have the same number of
1536        "ons" and "offs".
1537        Parameters:
1538            preserve:       you can preserve (default) the continuum or
1539                            remove it.  The equations used are
1540                            preserve: Output = Toff * (on/off) - Toff
1541                            remove:   Output = Toff * (on/off) - Ton
1542            mode:           the on/off detection mode
1543                            'paired' (default)
1544                            identifies 'off' scans by the
1545                            trailing '_R' (Mopra/Parkes) or
1546                            '_e'/'_w' (Tid) and matches
1547                            on/off pairs from the observing pattern
1548                            'time'
1549                            finds the closest off in time
1550
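        Example (illustrative; assumes matching on/off scans are present):
            q = scan.auto_quotient(preserve=True, mode='paired')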
1551        """
1552        modes = ["time", "paired"]
1553        if not mode in modes:
1554            msg = "please provide a valid mode. Valid modes are %s" % (modes)
1555            raise ValueError(msg)
1556        varlist = vars()
1557        s = None
1558        if mode.lower() == "paired":
1559            basesel = self.get_selection()
1560            sel = selector()+basesel
1561            sel.set_query("SRCTYPE==1")
1562            self.set_selection(sel)
1563            offs = self.copy()
1564            sel.set_query("SRCTYPE==0")
1565            self.set_selection(sel)
1566            ons = self.copy()
1567            s = scantable(self._math._quotient(ons, offs, preserve))
1568            self.set_selection(basesel)
1569        elif mode.lower() == "time":
1570            s = scantable(self._math._auto_quotient(self, mode, preserve))
1571        s._add_history("auto_quotient", varlist)
1572        return s
1573
1574    @print_log_dec
1575    def mx_quotient(self, mask = None, weight='median', preserve=True):
1576        """
1577        Form a quotient using "off" beams when observing in "MX" mode.
1578        Parameters:
1579            mask:           an optional mask to be used when weight == 'stddev'
1580            weight:         How to average the off beams.  Default is 'median'.
1581            preserve:       you can preserve (default) the continuum or
1582                            remove it.  The equations used are
1583                            preserve: Output = Toff * (on/off) - Toff
1584                            remove:   Output = Toff * (on/off) - Ton
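        Example (illustrative; assumes an MX-mode observation):
            q = scan.mx_quotient(weight='median', preserve=True)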
1585        """
1586        mask = mask or ()
1587        varlist = vars()
1588        on = scantable(self._math._mx_extract(self, 'on'))
1589        preoff = scantable(self._math._mx_extract(self, 'off'))
1590        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1591        from asapmath  import quotient
1592        q = quotient(on, off, preserve)
1593        q._add_history("mx_quotient", varlist)
1594        return q
1595
1596    @print_log_dec
1597    def freq_switch(self, insitu=None):
1598        """
1599        Apply frequency switching to the data.
1600        Parameters:
1601            insitu:      if False a new scantable is returned.
1602                         Otherwise, the switching is done in-situ
1603                         The default is taken from .asaprc (False)
1604        Example (a minimal sketch; assumes a frequency-switched observation):
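            fsscan = scan.freq_switch(insitu=False)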
1606        """
1607        if insitu is None: insitu = rcParams['insitu']
1608        self._math._setinsitu(insitu)
1609        varlist = vars()
1610        s = scantable(self._math._freqswitch(self))
1611        s._add_history("freq_switch", varlist)
1612        if insitu: self._assign(s)
1613        else: return s
1614
1615    @print_log_dec
1616    def recalc_azel(self):
1617        """
1618        Recalculate the azimuth and elevation for each position.
1619        Parameters:
1620            none
1621        Example:
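            scan.recalc_azel()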
1622        """
1623        varlist = vars()
1624        self._recalcazel()
1625        self._add_history("recalc_azel", varlist)
1626        return
1627
1628    @print_log_dec
1629    def __add__(self, other):
1630        varlist = vars()
1631        s = None
1632        if isinstance(other, scantable):
1633            s = scantable(self._math._binaryop(self, other, "ADD"))
1634        elif isinstance(other, float):
1635            s = scantable(self._math._unaryop(self, other, "ADD", False))
1636        else:
1637            raise TypeError("Other input is not a scantable or float value")
1638        s._add_history("operator +", varlist)
1639        return s
1640
1641    @print_log_dec
1642    def __sub__(self, other):
1643        """
1644        implicit on all axes and on Tsys
1645        """
1646        varlist = vars()
1647        s = None
1648        if isinstance(other, scantable):
1649            s = scantable(self._math._binaryop(self, other, "SUB"))
1650        elif isinstance(other, float):
1651            s = scantable(self._math._unaryop(self, other, "SUB", False))
1652        else:
1653            raise TypeError("Other input is not a scantable or float value")
1654        s._add_history("operator -", varlist)
1655        return s
1656
1657    @print_log_dec
1658    def __mul__(self, other):
1659        """
1660        implicit on all axes and on Tsys
1661        """
1662        varlist = vars()
1663        s = None
1664        if isinstance(other, scantable):
1665            s = scantable(self._math._binaryop(self, other, "MUL"))
1666        elif isinstance(other, float):
1667            s = scantable(self._math._unaryop(self, other, "MUL", False))
1668        else:
1669            raise TypeError("Other input is not a scantable or float value")
1670        s._add_history("operator *", varlist)
1671        return s
1672
1673
1674    @print_log_dec
1675    def __div__(self, other):
1676        """
1677        implicit on all axes and on Tsys
1678        """
1679        varlist = vars()
1680        s = None
1681        if isinstance(other, scantable):
1682            s = scantable(self._math._binaryop(self, other, "DIV"))
1683        elif isinstance(other, float):
1684            if other == 0.0:
1685                raise ZeroDivisionError("Dividing by zero is not recommended")
1686            s = scantable(self._math._unaryop(self, other, "DIV", False))
1687        else:
1688            raise TypeError("Other input is not a scantable or float value")
1689        s._add_history("operator /", varlist)
1690        return s
1691
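    # Operator usage (illustrative; assumes 'scan' and 'other' are compatible
    # scantables):
    #     diff = scan - other      # scantable op scantable
    #     scaled = scan * 2.0      # scantable op float (Tsys is not scaled)
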
1692    def get_fit(self, row=0):
1693        """
1694        Print or return the stored fits for a row in the scantable
1695        Parameters:
1696            row:    the row to which the fit has been applied.
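        Example (illustrative; only useful once a fit has been stored for the row):
            fitinfo = scan.get_fit(0)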
1697        """
1698        if row >= self.nrow():
1699            return
1700        from asap.asapfit import asapfit
1701        fit = asapfit(self._getfit(row))
1702        if rcParams['verbose']:
1703            print fit
1704            return
1705        else:
1706            return fit.as_dict()
1707
1708    def flag_nans(self):
1709        """
1710        Utility function to flag NaN values in the scantable.
1711        """
1712        import numpy
1713        basesel = self.get_selection()
1714        for i in range(self.nrow()):
1715            sel = self.get_row_selector(i)
1716            self.set_selection(basesel+sel)
1717            nans = numpy.isnan(self._getspectrum(0))
1718            if numpy.any(nans):
1719                bnans = [bool(v) for v in nans]
1720                self.flag(bnans)
1721        self.set_selection(basesel)
1722
1723    def get_row_selector(self, rowno):
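        """
        Return a selector that selects the beam, IF, polarisation, scan
        and cycle numbers of the given row.
        """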
1724        return selector(beams=self.getbeam(rowno),
1725                        ifs=self.getif(rowno),
1726                        pols=self.getpol(rowno),
1727                        scans=self.getscan(rowno),
1728                        cycles=self.getcycle(rowno))
1729
1730    def _add_history(self, funcname, parameters):
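        # history entries are stored as a single '##'-separated string:
        # "<date>##<funcname>##<param>=<value>##..."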
1731        if not rcParams['scantable.history']:
1732            return
1733        # create date
1734        sep = "##"
1735        from datetime import datetime
1736        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1737        hist = dstr+sep
1738        hist += funcname+sep#cdate+sep
1739        if parameters.has_key('self'): del parameters['self']
1740        for k, v in parameters.iteritems():
1741            if type(v) is dict:
1742                for k2, v2 in v.iteritems():
1743                    hist += k2
1744                    hist += "="
1745                    if isinstance(v2, scantable):
1746                        hist += 'scantable'
1747                    elif k2 == 'mask':
1748                        if isinstance(v2, list) or isinstance(v2, tuple):
1749                            hist += str(self._zip_mask(v2))
1750                        else:
1751                            hist += str(v2)
1752                    else:
1753                        hist += str(v2)
1754            else:
1755                hist += k
1756                hist += "="
1757                if isinstance(v, scantable):
1758                    hist += 'scantable'
1759                elif k == 'mask':
1760                    if isinstance(v, list) or isinstance(v, tuple):
1761                        hist += str(self._zip_mask(v))
1762                    else:
1763                        hist += str(v)
1764                else:
1765                    hist += str(v)
1766            hist += sep
1767        hist = hist[:-2] # remove trailing '##'
1768        self._addhistory(hist)
1769
1770
1771    def _zip_mask(self, mask):
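        # compress a channel mask into a list of [start, end) index pairs
        # covering the runs of unmasked (True) channels, to keep history
        # entries compact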
1772        mask = list(mask)
1773        i = 0
1774        segments = []
1775        while mask[i:].count(1):
1776            i += mask[i:].index(1)
1777            if mask[i:].count(0):
1778                j = i + mask[i:].index(0)
1779            else:
1780                j = len(mask)
1781            segments.append([i, j])
1782            i = j
1783        return segments
1784
1785    def _get_ordinate_label(self):
1786        fu = "("+self.get_fluxunit()+")"
1787        import re
1788        lbl = "Intensity"
1789        if re.match(".K.", fu):
1790            lbl = "Brightness Temperature "+ fu
1791        elif re.match(".Jy.", fu):
1792            lbl = "Flux density "+ fu
1793        return lbl
1794
1795    def _check_ifs(self):
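        # crude check that all IFs (with data) have the same number of channels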
1796        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1797        nchans = filter(lambda t: t > 0, nchans)
1798        return (sum(nchans)/len(nchans) == nchans[0])
1799
1800    def _fill(self, names, unit, average):
1801        import os
1802        from asap._asap import stfiller
1803        first = True
1804        fullnames = []
1805        for name in names:
1806            name = os.path.expandvars(name)
1807            name = os.path.expanduser(name)
1808            if not os.path.exists(name):
1809                msg = "File '%s' does not exist" % (name)
1810                if rcParams['verbose']:
1811                    asaplog.push(msg)
1812                    print asaplog.pop().strip()
1813                    return
1814                raise IOError(msg)
1815            fullnames.append(name)
1816        if average:
1817            asaplog.push('Auto averaging integrations')
1818        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1819        for name in fullnames:
1820            tbl = Scantable(stype)
1821            r = stfiller(tbl)
1822            rx = rcParams['scantable.reference']
1823            r._setreferenceexpr(rx)
1824            msg = "Importing %s..." % (name)
1825            asaplog.push(msg, False)
1826            print_log()
1827            r._open(name, -1, -1)
1828            r._read()
1829            if average:
1830                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1831            if not first:
1832                tbl = self._math._merge([self, tbl])
1833            Scantable.__init__(self, tbl)
1834            r._close()
1835            del r, tbl
1836            first = False
1837        if unit is not None:
1838            self.set_fluxunit(unit)
1839        self.set_freqframe(rcParams['scantable.freqframe'])
1840
1841    def __getitem__(self, key):
1842        if key < 0:
1843            key += self.nrow()
1844        if key >= self.nrow():
1845            raise IndexError("Row index out of range.")
1846        return self._getspectrum(key)
1847
1848    def __setitem__(self, key, value):
1849        if key < 0:
1850            key += self.nrow()
1851        if key >= self.nrow():
1852            raise IndexError("Row index out of range.")
1853        if not hasattr(value, "__len__") or \
1854                len(value) > self.nchan(self.getif(key)):
1855            raise ValueError("Spectrum length doesn't match.")
1856        return self._setspectrum(value, key)
1857
1858    def __len__(self):
1859        return self.nrow()
1860
1861    def __iter__(self):
1862        for i in range(len(self)):
1863            yield self[i]
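
    # The methods above implement the sequence protocol (illustrative usage):
    #     for spec in scan:        # iterate over the spectra, row by row
    #         print len(spec)
    #     scan[0] = spec           # write a spectrum back into row 0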