source: trunk/python/scantable.py @ 1373

Last change on this file since 1373 was 1373, checked in by mar637, 17 years ago

Added running median to smooth. This addresses Ticket #115

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32        """
33        if average is None:
34            average = rcParams['scantable.autoaverage']
35        varlist = vars()
36        from asap._asap import stmath
37        self._math = stmath()
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40        else:
41            if isinstance(filename, str):
42                import os.path
43                filename = os.path.expandvars(filename)
44                filename = os.path.expanduser(filename)
45                if not os.path.exists(filename):
46                    s = "File '%s' not found." % (filename)
47                    if rcParams['verbose']:
48                        asaplog.push(s)
49                        print asaplog.pop().strip()
50                        return
51                    raise IOError(s)
52                if os.path.isdir(filename) \
53                    and not os.path.exists(filename+'/table.f1'):
54                    # crude check if asap table
55                    if os.path.exists(filename+'/table.info'):
56                        ondisk = rcParams['scantable.storage'] == 'disk'
57                        Scantable.__init__(self, filename, ondisk)
58                        if unit is not None:
59                            self.set_fluxunit(unit)
60                        self.set_freqframe(rcParams['scantable.freqframe'])
61                    else:
62                        msg = "The given file '%s' is not a valid " \
63                              "asap table." % (filename)
64                        if rcParams['verbose']:
65                            print msg
66                            return
67                        else:
68                            raise IOError(msg)
69                else:
70                    self._fill([filename], unit, average)
71            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72                  and isinstance(filename[-1], str):
73                self._fill(filename, unit, average)
74        self._add_history("scantable", varlist)
75        print_log()
76
77    def save(self, name=None, format=None, overwrite=False):
78        """
79        Store the scantable on disk. This can be an asap (aips++) Table,
80        SDFITS or MS2 format.
81        Parameters:
82            name:        the name of the outputfile. For format "ASCII"
83                         this is the root file name (data in 'name'.txt
84                         and header in 'name'_header.txt)
85            format:      an optional file format. Default is ASAP.
86                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87                                       'SDFITS' (save as SDFITS file)
88                                       'ASCII' (saves as ascii text file)
89                                       'MS2' (saves as an aips++
90                                              MeasurementSet V2)
91            overwrite:   If the file should be overwritten if it exists.
92                         The default False is to return with warning
93                         without writing the output. USE WITH CARE.
94        Example:
95            scan.save('myscan.asap')
96            scan.save('myscan.sdfits', 'SDFITS')
97        """
98        from os import path
99        if format is None: format = rcParams['scantable.save']
100        suffix = '.'+format.lower()
101        if name is None or name == "":
102            name = 'scantable'+suffix
103            msg = "No filename given. Using default name %s..." % name
104            asaplog.push(msg)
105        name = path.expandvars(name)
106        if path.isfile(name) or path.isdir(name):
107            if not overwrite:
108                msg = "File %s exists." % name
109                if rcParams['verbose']:
110                    print msg
111                    return
112                else:
113                    raise IOError(msg)
114        format2 = format.upper()
115        if format2 == 'ASAP':
116            self._save(name)
117        else:
118            from asap._asap import stwriter as stw
119            writer = stw(format2)
120            writer.write(self, name)
121        print_log()
122        return
123
124    def copy(self):
125        """
126        Return a copy of this scantable.
127        Note:
128            This makes a full (deep) copy. scan2 = scan1 makes a reference.
129        Parameters:
130            none
131        Example:
132            copiedscan = scan.copy()
133        """
134        sd = scantable(Scantable._copy(self))
135        return sd
136
137    def drop_scan(self, scanid=None):
138        """
139        Return a new scantable where the specified scan number(s) has(have)
140        been dropped.
141        Parameters:
142            scanid:    a (list of) scan number(s)
143        """
144        from asap import _is_sequence_or_number as _is_valid
145        from asap import _to_list
146        from asap import unique
147        if not _is_valid(scanid):
148            if rcParams['verbose']:
149                print "Please specify a scanno to drop from the scantable"
150                return
151            else:
152                raise RuntimeError("No scan given")
153        try:
154            scanid = _to_list(scanid)
155            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156            for sid in scanid: allscans.remove(sid)
157            if len(allscans) == 0:
158                raise ValueError("Can't remove all scans")
159        except ValueError:
160            if rcParams['verbose']:
161                print "Couldn't find any match."
162                return
163            else: raise
164        try:
165            bsel = self.get_selection()
166            sel = selector()
167            sel.set_scans(allscans)
168            self.set_selection(bsel+sel)
169            scopy = self._copy()
170            self.set_selection(bsel)
171            return scantable(scopy)
172        except RuntimeError:
173            if rcParams['verbose']:
174                print "Couldn't find any match."
175            else:
176                raise
177
178
179    def get_scan(self, scanid=None):
180        """
181        Return a specific scan (by scanno) or collection of scans (by
182        source name) in a new scantable.
183        Note:
184            See scantable.drop_scan() for the inverse operation.
185        Parameters:
186            scanid:    a (list of) scanno or a source name, unix-style
187                       patterns are accepted for source name matching, e.g.
188                       '*_R' gets all 'ref' scans
189        Example:
190            # get all scans containing the source '323p459'
191            newscan = scan.get_scan('323p459')
192            # get all 'off' scans
193            refscans = scan.get_scan('*_R')
194            # get a subset of scans by scanno (as listed in scan.summary())
195            newscan = scan.get_scan([0, 2, 7, 10])
196        """
197        if scanid is None:
198            if rcParams['verbose']:
199                print "Please specify a scan no or name to " \
200                      "retrieve from the scantable"
201                return
202            else:
203                raise RuntimeError("No scan given")
204
205        try:
206            bsel = self.get_selection()
207            sel = selector()
208            if type(scanid) is str:
209                sel.set_name(scanid)
210                self.set_selection(bsel+sel)
211                scopy = self._copy()
212                self.set_selection(bsel)
213                return scantable(scopy)
214            elif type(scanid) is int:
215                sel.set_scans([scanid])
216                self.set_selection(bsel+sel)
217                scopy = self._copy()
218                self.set_selection(bsel)
219                return scantable(scopy)
220            elif type(scanid) is list:
221                sel.set_scans(scanid)
222                self.set_selection(bsel+sel)
223                scopy = self._copy()
224                self.set_selection(bsel)
225                return scantable(scopy)
226            else:
227                msg = "Illegal scanid type, use 'int' or 'list' of ints."
228                if rcParams['verbose']:
229                    print msg
230                else:
231                    raise TypeError(msg)
232        except RuntimeError:
233            if rcParams['verbose']: print "Couldn't find any match."
234            else: raise
235
236    def __str__(self):
237        return Scantable._summary(self, True)
238
239    def summary(self, filename=None):
240        """
241        Print a summary of the contents of this scantable.
242        Parameters:
243            filename:    the name of a file to write the output to
244                         Default - no file output
247        """
248        info = Scantable._summary(self, True)
249        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250        if filename is not None:
251            if filename == "":
252                filename = 'scantable_summary.txt'
253            from os.path import expandvars, isdir
254            filename = expandvars(filename)
255            if not isdir(filename):
256                data = open(filename, 'w')
257                data.write(info)
258                data.close()
259            else:
260                msg = "Illegal file name '%s'." % (filename)
261                if rcParams['verbose']:
262                    print msg
263                else:
264                    raise IOError(msg)
265        if rcParams['verbose']:
266            try:
267                from IPython.genutils import page as pager
268            except ImportError:
269                from pydoc import pager
270            pager(info)
271        else:
272            return info
273
274
275    def get_selection(self):
276        """
277        Get the selection object currently set on this scantable.
278        Parameters:
279            none
280        Example:
281            sel = scan.get_selection()
282            sel.set_ifs(0)              # select IF 0
283            scan.set_selection(sel)     # apply modified selection
284        """
285        return selector(self._getselection())
286
287    def set_selection(self, selection=selector()):
288        """
289        Select a subset of the data. All following operations on this scantable
290        are only applied to this selection.
291        Parameters:
292            selection:    a selector object (default unset the selection)
293        Examples:
294            sel = selector()         # create a selection object
295            sel.set_scans([0, 3])    # select SCANNO 0 and 3
296            scan.set_selection(sel)  # set the selection
297            scan.summary()           # will only print summary of scannos 0 and 3
298            scan.set_selection()     # unset the selection
299        """
300        self._setselection(selection)
301
302    def stats(self, stat='stddev', mask=None):
303        """
304        Determine the specified statistic of the current beam/if/pol
305        Takes a 'mask' as an optional parameter to specify which
306        channels should be excluded.
307        Parameters:
308            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
309                     'var', 'stddev', 'avdev', 'rms', 'median'
310            mask:    an optional mask specifying where the statistic
311                     should be determined.
312        Example:
313            scan.set_unit('channel')
314            msk = scan.create_mask([100, 200], [500, 600])
315            scan.stats(stat='mean', mask=msk)
316        """
317        if mask == None:
318            mask = []
319        axes = ['Beam', 'IF', 'Pol', 'Time']
320        if not self._check_ifs():
321            raise ValueError("Cannot apply mask as the IFs have different "
322                             "numbers of channels. Please use set_selection() "
323                             "to select individual IFs")
324
325        statvals = self._math._stats(self, mask, stat)
326        out = ''
327        axes = []
328        for i in range(self.nrow()):
329            axis = []
330            axis.append(self.getscan(i))
331            axis.append(self.getbeam(i))
332            axis.append(self.getif(i))
333            axis.append(self.getpol(i))
334            axis.append(self.getcycle(i))
335            axes.append(axis)
336            tm = self._gettime(i)
337            src = self._getsourcename(i)
338            out += 'Scan[%d] (%s) ' % (axis[0], src)
339            out += 'Time[%s]:\n' % (tm)
340            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
341            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
342            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
343            out += '= %3.3f\n' % (statvals[i])
344            out +=  "--------------------------------------------------\n"
345
346        if rcParams['verbose']:
347            print "--------------------------------------------------"
348            print " ", stat
349            print "--------------------------------------------------"
350            print out
351        #else:
352            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353            #           'axes' : axes,
354            #           'data': statvals}
355        return statvals
356
357    def stddev(self, mask=None):
358        """
359        Determine the standard deviation of the current beam/if/pol
360        Takes a 'mask' as an optional parameter to specify which
361        channels should be excluded.
362        Parameters:
363            mask:    an optional mask specifying where the standard
364                     deviation should be determined.
365
366        Example:
367            scan.set_unit('channel')
368            msk = scan.create_mask([100, 200], [500, 600])
369            scan.stddev(mask=msk)
370        """
371        return self.stats(stat='stddev', mask=mask);
372
373
374    def get_column_names(self):
375        """
376        Return a  list of column names, which can be used for selection.
377        """
378        return list(Scantable.get_column_names(self))
379
380    def get_tsys(self):
381        """
382        Return the System temperatures.
383        Returns:
384            a list of Tsys values for the current selection
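        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            tsys = scan.get_tsys()   # one value per row of the current selection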
385        """
386
387        return self._row_callback(self._gettsys, "Tsys")
388
389    def _row_callback(self, callback, label):
390        axes = []
391        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
392        out = ""
393        outvec = []
394        for i in range(self.nrow()):
395            axis = []
396            axis.append(self.getscan(i))
397            axis.append(self.getbeam(i))
398            axis.append(self.getif(i))
399            axis.append(self.getpol(i))
400            axis.append(self.getcycle(i))
401            axes.append(axis)
402            tm = self._gettime(i)
403            src = self._getsourcename(i)
404            out += 'Scan[%d] (%s) ' % (axis[0], src)
405            out += 'Time[%s]:\n' % (tm)
406            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
407            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
408            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
409            outvec.append(callback(i))
410            out += '= %3.3f\n' % (outvec[i])
411            out +=  "--------------------------------------------------\n"
412        if rcParams['verbose']:
413            print "--------------------------------------------------"
414            print " %s" % (label)
415            print "--------------------------------------------------"
416            print out
417        # disabled because the vector seems more useful
418        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419        return outvec
420
421    def _get_column(self, callback, row=-1):
422        """
423        """
424        if row == -1:
425            return [callback(i) for i in range(self.nrow())]
426        else:
427            if  0 <= row < self.nrow():
428                return callback(row)
429
430
431    def get_time(self, row=-1, asdatetime=False):
432        """
433        Get a list of time stamps for the observations.
434        Return a string (or a datetime object, if asdatetime=True) for each
435        integration time stamp in the scantable.
435        Parameters:
436            row:          row no of integration. Default -1 return all rows
437            asdatetime:   return values as datetime objects rather than strings
438        Example:
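            # a minimal sketch; 'scan' is assumed to be a scantable
            stamps = scan.get_time()                 # time stamps as strings
            dates = scan.get_time(asdatetime=True)   # as datetime objects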
440        """
441        from time import strptime
442        from datetime import datetime
443        times = self._get_column(self._gettime, row)
444        if not asdatetime:
445            return times
446        format = "%Y/%m/%d/%H:%M:%S"
447        if isinstance(times, list):
448            return [datetime(*strptime(i, format)[:6]) for i in times]
449        return datetime(*strptime(times, format)[:6])
450
451
452    def get_inttime(self, row=-1):
453        """
454        Get a list of integration times for the observations.
455        Return a time in seconds for each integration in the scantable.
456        Parameters:
457            row:    row no of integration. Default -1 return all rows.
458        Example:
459            none
460        """
461        return self._get_column(self._getinttime, row)       
462       
463
464    def get_sourcename(self, row=-1):
465        """
466        Get a list of source names for the observations.
467        Return a string for each integration in the scantable.
468        Parameters:
469            row:    row no of integration. Default -1 return all rows.
470        Example:
471            none
472        """
473        return self._get_column(self._getsourcename, row)
474
475    def get_elevation(self, row=-1):
476        """
477        Get a list of elevations for the observations.
478        Return a float for each integration in the scantable.
479        Parameters:
480            row:    row no of integration. Default -1 return all rows.
481        Example:
482            none
483        """
484        return self._get_column(self._getelevation, row)
485
486    def get_azimuth(self, row=-1):
487        """
488        Get a list of azimuths for the observations.
489        Return a float for each integration in the scantable.
490        Parameters:
491            row:    row no of integration. Default -1 return all rows.
492        Example:
493            none
494        """
495        return self._get_column(self._getazimuth, row)
496
497    def get_parangle(self, row=-1):
498        """
499        Get a list of parallactic angles for the observations.
500        Return a float for each integration in the scantable.
501        Parameters:
502            row:    row no of integration. Default -1 return all rows.
503        Example:
504            none
505        """
506        return self._get_column(self._getparangle, row)
507
508    def get_direction(self, row=-1):
509        """
510        Get a list of positions on the sky (direction) for the observations.
511        Return a direction for each integration in the scantable.
512        Parameters:
513            row:    row no of integration. Default -1 return all rows
514        Example:
515            none
516        """
517        return self._get_column(self._getdirection, row)
518
519    def set_unit(self, unit='channel'):
520        """
521        Set the unit for all following operations on this scantable
522        Parameters:
523            unit:    optional unit, default is 'channel'
524                     one of '*Hz', 'km/s', 'channel', ''
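        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            scan.set_unit('km/s')   # subsequent masks/plots use velocity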
525        """
526        varlist = vars()
527        if unit in ['', 'pixel', 'channel']:
528            unit = ''
529        inf = list(self._getcoordinfo())
530        inf[0] = unit
531        self._setcoordinfo(inf)
532        self._add_history("set_unit", varlist)
533
534    def set_instrument(self, instr):
535        """
536        Set the instrument for subsequent processing.
537        Parameters:
538            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
539                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
540        """
541        self._setInstrument(instr)
542        self._add_history("set_instument", vars())
543        print_log()
544
545    def set_feedtype(self, feedtype):
546        """
547        Overwrite the feed type, which might not be set correctly.
548        Parameters:
549            feedtype:     'linear' or 'circular'
550        """
551        self._setfeedtype(feedtype)
552        self._add_history("set_feedtype", vars())
553        print_log()
554
555    def set_doppler(self, doppler='RADIO'):
556        """
557        Set the doppler for all following operations on this scantable.
558        Parameters:
559            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
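        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            scan.set_doppler('OPTICAL')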
560        """
561        varlist = vars()
562        inf = list(self._getcoordinfo())
563        inf[2] = doppler
564        self._setcoordinfo(inf)
565        self._add_history("set_doppler", vars())
566        print_log()
567
568    def set_freqframe(self, frame=None):
569        """
570        Set the frame type of the Spectral Axis.
571        Parameters:
572            frame:   an optional frame type, default 'LSRK'. Valid frames are:
573                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
574                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
575        Examples:
576            scan.set_freqframe('BARY')
577        """
578        if frame is None: frame = rcParams['scantable.freqframe']
579        varlist = vars()
580        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
581                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
582
583        if frame in valid:
584            inf = list(self._getcoordinfo())
585            inf[1] = frame
586            self._setcoordinfo(inf)
587            self._add_history("set_freqframe", varlist)
588        else:
589            msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
590            if rcParams['verbose']:
591                print msg
592            else:
593                raise TypeError(msg)
594        print_log()
595
596    def set_dirframe(self, frame=""):
597        """
598        Set the frame type of the Direction on the sky.
599        Parameters:
600            frame:   an optional frame type, default ''. Valid frames are:
601                     'J2000', 'B1950', 'GALACTIC'
602        Examples:
603            scan.set_dirframe('GALACTIC')
604        """
605        varlist = vars()
606        try:
607            Scantable.set_dirframe(self, frame)
608        except RuntimeError, msg:
609            if rcParams['verbose']:
610                print msg
611            else:
612                raise
613        self._add_history("set_dirframe", varlist)
614
615    def get_unit(self):
616        """
617        Get the default unit set in this scantable
618        Returns:
619            A unit string
620        """
621        inf = self._getcoordinfo()
622        unit = inf[0]
623        if unit == '': unit = 'channel'
624        return unit
625
626    def get_abcissa(self, rowno=0):
627        """
628        Get the abcissa in the current coordinate setup for the currently
629        selected Beam/IF/Pol
630        Parameters:
631            rowno:    an optional row number in the scantable. Default is the
632                      first row, i.e. rowno=0
633        Returns:
634            The abcissa values and the format string (as a dictionary)
635        """
636        abc = self._getabcissa(rowno)
637        lbl = self._getabcissalabel(rowno)
638        print_log()
639        return abc, lbl
640
641    def flag(self, mask=None):
642        """
643        Flag the selected data using an optional channel mask.
644        Parameters:
645            mask:   an optional channel mask, created with create_mask. Default
646                    (no mask) is all channels.
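        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            msk = scan.create_mask([1000, 1100])   # channels 1000-1100
            scan.flag(msk)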
647        """
648        varlist = vars()
649        if mask is None:
650            mask = []
651        try:
652            self._flag(mask)
653        except RuntimeError, msg:
654            if rcParams['verbose']:
655                print msg
656                return
657            else: raise
658        self._add_history("flag", varlist)
659
660    def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
661        """
662        Flag the data in 'lag' space by providing a frequency to remove.
663        Flagged data in the scantable gets set to 0.0 before the fft.
664        No taper is applied.
665        Parameters:
666            frequency:    the frequency (really a period within the bandwidth)
667                          to remove
668            width:        the width of the frequency to remove, to remove a
669                          range of frequencies around the centre.
670            unit:         the frequency unit (default "GHz")
671        Notes:
672            It is recommended to flag edges of the band or strong
673            signals beforehand.
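        Example (a minimal sketch; the frequency and width values are illustrative):
            scan.lag_flag(0.01, width=0.001, unit="GHz")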
674        """
675        if insitu is None: insitu = rcParams['insitu']
676        self._math._setinsitu(insitu)
677        varlist = vars()
678        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
679        if not base.has_key(unit):
680            raise ValueError("%s is not a valid unit." % unit)
681        try:
682            s = scantable(self._math._lag_flag(self, frequency*base[unit],
683                                               width*base[unit]))
684        except RuntimeError, msg:
685            if rcParams['verbose']:
686                print msg
687                return
688            else: raise
689        s._add_history("lag_flag", varlist)
690        print_log()
691        if insitu:
692            self._assign(s)
693        else:
694            return s
695
696
697    def create_mask(self, *args, **kwargs):
698        """
699        Compute and return a mask based on [min, max] windows.
700        The specified windows are to be INCLUDED, when the mask is
701        applied.
702        Parameters:
703            [min, max], [min2, max2], ...
704                Pairs of start/end points (inclusive) specifying the regions
705                to be masked
706            invert:     optional argument. If specified as True,
707                        return an inverted mask, i.e. the regions
708                        specified are EXCLUDED
709            row:        create the mask using the specified row for
710                        unit conversions, default is row=0
711                        only necessary if frequency varies over rows.
712        Example:
713            scan.set_unit('channel')
714            a)
715            msk = scan.create_mask([400, 500], [800, 900])
716            # masks everything outside 400 and 500
717            # and 800 and 900 in the unit 'channel'
718
719            b)
720            msk = scan.create_mask([400, 500], [800, 900], invert=True)
721            # masks the regions between 400 and 500
722            # and 800 and 900 in the unit 'channel'
723            c)
724            # mask only channel 400
725            msk = scan.create_mask([400, 400])
726        """
727        row = 0
728        if kwargs.has_key("row"):
729            row = kwargs.get("row")
730        data = self._getabcissa(row)
731        u = self._getcoordinfo()[0]
732        if rcParams['verbose']:
733            if u == "": u = "channel"
734            msg = "The current mask window unit is %s" % u
735            i = self._check_ifs()
736            if not i:
737                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
738            asaplog.push(msg)
739        n = self.nchan()
740        msk = _n_bools(n, False)
741        # test if args is a 'list' or a 'normal *args - UGLY!!!
742
743        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
744             and args or args[0]
745        for window in ws:
746            if (len(window) != 2 or window[0] > window[1] ):
747                raise TypeError("A window needs to be defined as [min, max]")
748            for i in range(n):
749                if data[i] >= window[0] and data[i] <= window[1]:
750                    msk[i] = True
751        if kwargs.has_key('invert'):
752            if kwargs.get('invert'):
753                msk = mask_not(msk)
754        print_log()
755        return msk
756
757    def get_restfreqs(self):
758        """
759        Get the restfrequency(s) stored in this scantable.
760        The return value(s) are always of unit 'Hz'
761        Parameters:
762            none
763        Returns:
764            a list of doubles
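        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            rfs = scan.get_restfreqs()   # e.g. [1420405751.786]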
765        """
766        return list(self._getrestfreqs())
767
768
769    def set_restfreqs(self, freqs=None, unit='Hz'):
770        """
771        Set or replace the rest frequency of the selected data.
772        If the 'freqs' argument holds a scalar,
773        that rest frequency will be applied to all the selected
774        data.  If the 'freqs' argument holds
775        a vector, it MUST be of equal or smaller length than
776        the number of IFs (and the available rest frequencies will be
777        replaced by this vector).  In this case, *all* data have
778        the rest frequency set per IF according
779        to the corresponding value given in the 'freqs' vector.
780        E.g. 'freqs=[1e9, 2e9]' means IF 0 gets restfreq 1e9 and
781        IF 1 gets restfreq 2e9.
782        You can also specify the frequencies via a linecatalog.
783
784        Parameters:
785            freqs:   list of rest frequency values or string identifiers
786            unit:    unit for rest frequency (default 'Hz')
787
788        Example:
789            # set the given restfrequency for the whole table
790            scan.set_restfreqs(freqs=1.4e9)
791            # If the number of IFs in the data is >= 2, IF0 gets the first
792            # value, IF1 the second, ...
793            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
794            #set the given restfrequency for the whole table (by name)
795            scan.set_restfreqs(freqs="OH1667")
796
797        Note:
798            To do more sophisticated rest frequency setting, e.g. on a
799            source and IF basis, use scantable.set_selection() before using
800            this function.
801            # provided your scantable is called scan
802            selection = selector()
803            selection.set_name("ORION*")
804            selection.set_ifs([1])
805            scan.set_selection(selection)
806            scan.set_restfreqs(freqs=86.6e9)
807
808        """
809        varlist = vars()
810        from asap import linecatalog
811        # simple  value
812        if isinstance(freqs, int) or isinstance(freqs, float):
813            self._setrestfreqs(freqs, "",unit)
814        # list of values
815        elif isinstance(freqs, list) or isinstance(freqs, tuple):
816            # list values are scalars
817            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
818                sel = selector()
819                savesel = self._getselection()
820                iflist = self.getifnos()
821                for i in xrange(len(freqs)):
822                    sel.set_ifs(iflist[i])
823                    self._setselection(sel)
824                    self._setrestfreqs(freqs[i], "",unit)
825                self._setselection(savesel)
826            # list values are dicts, e.g. {'value': freq, 'name': name}
827            elif isinstance(freqs[-1], dict):
828                sel = selector()
829                savesel = self._getselection()
830                iflist = self.getifnos()
831                for i in xrange(len(freqs)):
832                    sel.set_ifs(iflist[i])
833                    self._setselection(sel)
834                    self._setrestfreqs(freqs[i]["value"],
835                                       freqs[i]["name"], "MHz")
836                self._setselection(savesel)
837        # freqs are to be taken from a linecatalog
838        elif isinstance(freqs, linecatalog):
839            sel = selector()
840            savesel = self._getselection()
841            iflist = self.getifnos()
842            for i in xrange(freqs.nrow()):
843                sel.set_ifs(iflist[i])
844                self._setselection(sel)
845                self._setrestfreqs(freqs.get_frequency(i),
846                                   freqs.get_name(i), "MHz")
847                # ensure that we are not iterating past nIF
848                if i == self.nif()-1: break
849            self._setselection(savesel)
850        else:
851            return
852        self._add_history("set_restfreqs", varlist)
853
854    def shift_refpix(self, delta):
855        """
856        Shift the reference pixel of the Spectral Coordinate by an
857        integer amount.
858        Parameters:
859            delta:   the amount to shift by
860        Note:
861            Be careful using this with broadband data.
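        Example (a minimal sketch; the shift value is illustrative):
            scan.shift_refpix(-2)   # shift the reference pixel by -2 channels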
862        """
863        Scantable.shift(self, delta)
864
865    def history(self, filename=None):
866        """
867        Print the history. Optionally to a file.
868        Parameters:
869            filename:    The name  of the file to save the history to.
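        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            scan.history()                       # display the processing history
            scan.history('scan_history.txt')     # or write it to a file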
870        """
871        hist = list(self._gethistory())
872        out = "-"*80
873        for h in hist:
874            if h.startswith("---"):
875                out += "\n"+h
876            else:
877                items = h.split("##")
878                date = items[0]
879                func = items[1]
880                items = items[2:]
881                out += "\n"+date+"\n"
882                out += "Function: %s\n  Parameters:" % (func)
883                for i in items:
884                    s = i.split("=")
885                    out += "\n   %s = %s" % (s[0], s[1])
886                out += "\n"+"-"*80
887        if filename is not None:
888            if filename == "":
889                filename = 'scantable_history.txt'
890            import os
891            filename = os.path.expandvars(os.path.expanduser(filename))
892            if not os.path.isdir(filename):
893                data = open(filename, 'w')
894                data.write(out)
895                data.close()
896            else:
897                msg = "Illegal file name '%s'." % (filename)
898                if rcParams['verbose']:
899                    print msg
900                else:
901                    raise IOError(msg)
902        if rcParams['verbose']:
903            try:
904                from IPython.genutils import page as pager
905            except ImportError:
906                from pydoc import pager
907            pager(out)
908        else:
909            return out
910        return
911    #
912    # Maths business
913    #
914
915    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
916        """
917        Return the (time) weighted average of a scan.
918        Note:
919            in channels only - align if necessary
920        Parameters:
921            mask:     an optional mask (only used for 'var' and 'tsys'
922                      weighting)
923            scanav:   True averages each scan separately
924                      False (default) averages all scans together,
925            weight:   Weighting scheme.
926                      'none'     (mean no weight)
927                      'var'      (1/var(spec) weighted)
928                      'tsys'     (1/Tsys**2 weighted)
929                      'tint'     (integration time weighted)
930                      'tintsys'  (Tint/Tsys**2)
931                      'median'   ( median averaging)
932                      The default is 'tint'
933            align:    align the spectra in velocity before averaging. It takes
934                      the time of the first spectrum as reference time.
935        Example:
936            # time average the scantable without using a mask
937            newscan = scan.average_time()
938        """
939        varlist = vars()
940        if weight is None: weight = 'TINT'
941        if mask is None: mask = ()
942        if scanav: scanav = "SCAN"
943        else: scanav = "NONE"
944        scan = (self, )
945        try:
946            if align:
947                scan = (self.freq_align(insitu=False), )
948            s = None
949            if weight.upper() == 'MEDIAN':
950                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
951                                                         scanav))
952            else:
953                s = scantable(self._math._average(scan, mask, weight.upper(),
954                              scanav))
955        except RuntimeError, msg:
956            if rcParams['verbose']:
957                print msg
958                return
959            else: raise
960        s._add_history("average_time", varlist)
961        print_log()
962        return s
963
964    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
965        """
966        Return a scan where all spectra are converted to either
967        Jansky or Kelvin depending upon the flux units of the scan table.
968        By default the function tries to look the values up internally.
969        If it can't find them (or if you want to over-ride), you must
970        specify EITHER jyperk OR eta (and D which it will try to look up
971        also if you don't set it). jyperk takes precedence if you set both.
972        Parameters:
973            jyperk:      the Jy / K conversion factor
974            eta:         the aperture efficiency
975            d:           the geometric diameter (metres)
976            insitu:      if False a new scantable is returned.
977                         Otherwise, the scaling is done in-situ
978                         The default is taken from .asaprc (False)
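        Example (a minimal sketch; the Jy/K value is illustrative):
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)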
979        """
980        if insitu is None: insitu = rcParams['insitu']
981        self._math._setinsitu(insitu)
982        varlist = vars()
983        if jyperk is None: jyperk = -1.0
984        if d is None: d = -1.0
985        if eta is None: eta = -1.0
986        s = scantable(self._math._convertflux(self, d, eta, jyperk))
987        s._add_history("convert_flux", varlist)
988        print_log()
989        if insitu: self._assign(s)
990        else: return s
991
992    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
993        """
994        Return a scan after applying a gain-elevation correction.
995        The correction can be made via either a polynomial or a
996        table-based interpolation (and extrapolation if necessary).
997        You specify polynomial coefficients, an ascii table or neither.
998        If you specify neither, then a polynomial correction will be made
999        with built in coefficients known for certain telescopes (an error
1000        will occur if the instrument is not known).
1001        The data and Tsys are *divided* by the scaling factors.
1002        Parameters:
1003            poly:        Polynomial coefficients (default None) to compute a
1004                         gain-elevation correction as a function of
1005                         elevation (in degrees).
1006            filename:    The name of an ascii file holding correction factors.
1007                         The first row of the ascii file must give the column
1008                         names and these MUST include columns
1009                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1010                         by this) somewhere.
1011                         The second row must give the data type of the
1012                         column. Use 'R' for Real and 'I' for Integer.
1013                         An example file would be
1014                         (actual factors are arbitrary) :
1015
1016                         TIME ELEVATION FACTOR
1017                         R R R
1018                         0.1 0 0.8
1019                         0.2 20 0.85
1020                         0.3 40 0.9
1021                         0.4 60 0.85
1022                         0.5 80 0.8
1023                         0.6 90 0.75
1024            method:      Interpolation method when correcting from a table.
1025                         Values are  "nearest", "linear" (default), "cubic"
1026                         and "spline"
1027            insitu:      if False a new scantable is returned.
1028                         Otherwise, the scaling is done in-situ
1029                         The default is taken from .asaprc (False)
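        Example (a minimal sketch; the polynomial coefficients are illustrative):
            gscan = scan.gain_el(poly=[0.5, 0.0125, -0.0001], insitu=False)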
1030        """
1031
1032        if insitu is None: insitu = rcParams['insitu']
1033        self._math._setinsitu(insitu)
1034        varlist = vars()
1035        if poly is None:
1036            poly = ()
1037        from os.path import expandvars
1038        filename = expandvars(filename)
1039        s = scantable(self._math._gainel(self, poly, filename, method))
1040        s._add_history("gain_el", varlist)
1041        print_log()
1042        if insitu: self._assign(s)
1043        else: return s
1044
1045    def freq_align(self, reftime=None, method='cubic', insitu=None):
1046        """
1047        Return a scan where all rows have been aligned in frequency/velocity.
1048        The alignment frequency frame (e.g. LSRK) is that set by function
1049        set_freqframe.
1050        Parameters:
1051            reftime:     reference time to align at. By default, the time of
1052                         the first row of data is used.
1053            method:      Interpolation method for regridding the spectra.
1054                         Choose from "nearest", "linear", "cubic" (default)
1055                         and "spline"
1056            insitu:      if False a new scantable is returned.
1057                         Otherwise, the scaling is done in-situ
1058                         The default is taken from .asaprc (False)
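        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            ascan = scan.freq_align(insitu=False)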
1059        """
1060        if insitu is None: insitu = rcParams["insitu"]
1061        self._math._setinsitu(insitu)
1062        varlist = vars()
1063        if reftime is None: reftime = ""
1064        s = scantable(self._math._freq_align(self, reftime, method))
1065        s._add_history("freq_align", varlist)
1066        print_log()
1067        if insitu: self._assign(s)
1068        else: return s
1069
1070    def opacity(self, tau, insitu=None):
1071        """
1072        Apply an opacity correction. The data
1073        and Tsys are multiplied by the correction factor.
1074        Parameters:
1075            tau:         Opacity from which the correction factor is
1076                         exp(tau*ZD)
1077                         where ZD is the zenith-distance
1078            insitu:      if False a new scantable is returned.
1079                         Otherwise, the scaling is done in-situ
1080                         The default is taken from .asaprc (False)
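        Example (a minimal sketch; the opacity value is illustrative):
            cscan = scan.opacity(0.09, insitu=False)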
1081        """
1082        if insitu is None: insitu = rcParams['insitu']
1083        self._math._setinsitu(insitu)
1084        varlist = vars()
1085        s = scantable(self._math._opacity(self, tau))
1086        s._add_history("opacity", varlist)
1087        print_log()
1088        if insitu: self._assign(s)
1089        else: return s
1090
1091    def bin(self, width=5, insitu=None):
1092        """
1093        Return a scan where all spectra have been binned up.
1094        Parameters:
1095            width:       The bin width (default=5) in pixels
1096            insitu:      if False a new scantable is returned.
1097                         Otherwise, the scaling is done in-situ
1098                         The default is taken from .asaprc (False)
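        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            bscan = scan.bin(width=10, insitu=False)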
1099        """
1100        if insitu is None: insitu = rcParams['insitu']
1101        self._math._setinsitu(insitu)
1102        varlist = vars()
1103        s = scantable(self._math._bin(self, width))
1104        s._add_history("bin", varlist)
1105        print_log()
1106        if insitu: self._assign(s)
1107        else: return s
1108
1109
1110    def resample(self, width=5, method='cubic', insitu=None):
1111        """
1112        Return a scan where all spectra have been resampled by interpolation.
1113       
1114        Parameters:
1115            width:       The bin width (default=5) in pixels
1116            method:      Interpolation method used for the resampling.
1117                         Values are  "nearest", "linear", "cubic" (default)
1118                         and "spline"
1119            insitu:      if False a new scantable is returned.
1120                         Otherwise, the scaling is done in-situ
1121                         The default is taken from .asaprc (False)
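        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            rscan = scan.resample(width=2, method='spline', insitu=False)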
1122        """
1123        if insitu is None: insitu = rcParams['insitu']
1124        self._math._setinsitu(insitu)
1125        varlist = vars()
1126        s = scantable(self._math._resample(self, method, width))
1127        s._add_history("resample", varlist)
1128        print_log()
1129        if insitu: self._assign(s)
1130        else: return s
1131
1132
1133    def average_pol(self, mask=None, weight='none'):
1134        """
1135        Average the Polarisations together.
1136        Parameters:
1137            mask:        An optional mask defining the region, where the
1138                         averaging will be applied. The output will have all
1139                         specified points masked.
1140            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1141                         weighted), or 'tsys' (1/Tsys**2 weighted)
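        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            pscan = scan.average_pol(weight='tsys')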
1142        """
1143        varlist = vars()
1144        if mask is None:
1145            mask = ()
1146        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1147        s._add_history("average_pol", varlist)
1148        print_log()
1149        return s
1150
1151    def average_beam(self, mask=None, weight='none'):
1152        """
1153        Average the Beams together.
1154        Parameters:
1155            mask:        An optional mask defining the region, where the
1156                         averaging will be applied. The output will have all
1157                         specified points masked.
1158            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1159                         weighted), or 'tsys' (1/Tsys**2 weighted)
1160        """
1161        varlist = vars()
1162        if mask is None:
1163            mask = ()
1164        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1165        s._add_history("average_beam", varlist)
1166        print_log()
1167        return s
1168
1169    def convert_pol(self, poltype=None):
1170        """
1171        Convert the data to a different polarisation type.
1172        Parameters:
1173            poltype:    The new polarisation type. Valid types are:
1174                        "linear", "stokes" and "circular"
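        Example (a minimal sketch; 'scan' is assumed to be a scantable):
            sscan = scan.convert_pol('stokes')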
1175        """
1176        varlist = vars()
1177        try:
1178            s = scantable(self._math._convertpol(self, poltype))
1179        except RuntimeError, msg:
1180            if rcParams['verbose']:
1181                print msg
1182                return
1183            else:
1184                raise
1185        s._add_history("convert_pol", varlist)
1186        print_log()
1187        return s
1188
1189    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1190        """
1191        Smooth the spectrum by the specified kernel (conserving flux).
1192        Parameters:
1193            kernel:     The type of smoothing kernel. Select from
1194                        'hanning' (default), 'gaussian', 'boxcar' and
1195                        'rmedian'
1196            width:      The width of the kernel in pixels. For hanning this is
1197                        ignored; otherwise it defaults to 5 pixels.
1198                        For 'gaussian' it is the Full Width Half
1199                        Maximum. For 'boxcar' it is the full width.
1200                        For 'rmedian' it is the half width.
1201            insitu:     if False a new scantable is returned.
1202                        Otherwise, the scaling is done in-situ
1203                        The default is taken from .asaprc (False)
1204        Example:
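            # a minimal sketch; 'scan' is assumed to be a scantable
            sscan = scan.smooth(kernel='boxcar', width=5, insitu=False)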
1206        """
1207        if insitu is None: insitu = rcParams['insitu']
1208        self._math._setinsitu(insitu)
1209        varlist = vars()
1210        s = scantable(self._math._smooth(self, kernel.lower(), width))
1211        s._add_history("smooth", varlist)
1212        print_log()
1213        if insitu: self._assign(s)
1214        else: return s
1215
1216
1217    def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1218        """
1219        Return a scan which has been baselined (all rows) by a polynomial.
1220        Parameters:
1221            mask:       an optional mask
1222            order:      the order of the polynomial (default is 0)
1223            plot:       plot the fit and the residual. In this mode each
1224                        individual fit has to be approved by typing 'y'
1225                        or 'n'
1226            insitu:     if False a new scantable is returned.
1227                        Otherwise, the scaling is done in-situ
1228                        The default is taken from .asaprc (False)
1229        Example:
1230            # return a scan baselined by a third order polynomial,
1231            # not using a mask
1232            bscan = scan.poly_baseline(order=3)
1233        """
1234        if insitu is None: insitu = rcParams['insitu']
1235        varlist = vars()
1236        if mask is None:
1237            mask = [True for i in xrange(self.nchan(-1))]
1238        from asap.asapfitter import fitter
1239        try:
1240            f = fitter()
1241            f.set_scan(self, mask)
1242            f.set_function(poly=order)
1243            s = f.auto_fit(insitu, plot=plot)
1244            s._add_history("poly_baseline", varlist)
1245            print_log()
1246            if insitu: self._assign(s)
1247            else: return s
1248        except RuntimeError:
1249            msg = "The fit failed, possibly because it didn't converge."
1250            if rcParams['verbose']:
1251                print msg
1252                return
1253            else:
1254                raise RuntimeError(msg)
1255
1256
1257    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1258                           threshold=3, chan_avg_limit=1, plot=False,
1259                           insitu=None):
1260        """
1261        Return a scan which has been baselined (all rows) by a polynomial.
1262        Spectral lines are detected first using linefinder and masked out
1263        to avoid them affecting the baseline solution.
1264
1265        Parameters:
1266            mask:       an optional mask retrieved from scantable
1267            edge:       an optional number of channels to drop at
1268                        the edge of spectrum. If only one value is
1269                        specified, the same number will be dropped from
1270                        both sides of the spectrum. Default is to keep
1271                        all channels. Nested tuples represent individual
1272                        edge selection for different IFs (a number of spectral
1273                        channels can be different)
1274            order:      the order of the polynomial (default is 0)
1275            threshold:  the threshold used by line finder. It is better to
1276                        keep it large as only strong lines affect the
1277                        baseline solution.
1278            chan_avg_limit:
1279                        a maximum number of consecutive spectral channels to
1280                        average during the search of weak and broad lines.
1281                        The default is no averaging (and no search for weak
1282                        lines). If such lines can affect the fitted baseline
1283                        (e.g. a high order polynomial is fitted), increase this
1284                        parameter (usually values up to 8 are reasonable). Most
1285                        users of this method should find the default value
1286                        sufficient.
1287            plot:       plot the fit and the residual. In this mode each
1288                        individual fit has to be approved by typing 'y'
1289                        or 'n'
1290            insitu:     if False a new scantable is returned.
1291                        Otherwise, the scaling is done in-situ
1292                        The default is taken from .asaprc (False)
1293
1294        Example:
1295            scan2=scan.auto_poly_baseline(order=7)
1296        """
1297        if insitu is None: insitu = rcParams['insitu']
1298        varlist = vars()
1299        from asap.asapfitter import fitter
1300        from asap.asaplinefind import linefinder
1301        from asap import _is_sequence_or_number as _is_valid
1302
1303        # check whether edge is set up for each IF individually
1304        individualedge = False;
1305        if len(edge) > 1:
1306            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1307                individualedge = True;
1308
1309        if not _is_valid(edge, int) and not individualedge:
1310            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1311                              "pair of integers specified as a tuple. Nested tuples are " \
1312                              "allowed to make individual selection for different IFs."
1313
1314        curedge = (0, 0)
1315        if individualedge:
1316            for edgepar in edge:
1317                if not _is_valid(edgepar, int):
1318                    raise ValueError, "Each element of the 'edge' tuple has " \
1319                                      "to be a pair of integers or an integer."
1320        else:
1321            curedge = edge;
1322
1323        # setup fitter
1324        f = fitter()
1325        f.set_function(poly=order)
1326
1327        # setup line finder
1328        fl = linefinder()
1329        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1330
1331        if not insitu:
1332            workscan = self.copy()
1333        else:
1334            workscan = self
1335
1336        fl.set_scan(workscan)
1337
1338        rows = range(workscan.nrow())
1339        asaplog.push("Processing:")
1340        for r in rows:
1341            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1342                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1343                 workscan.getpol(r), workscan.getcycle(r))
1344            asaplog.push(msg, False)
1345
1346            # figure out edge parameter
1347            if individualedge:
1348                if len(edge) <= workscan.getif(r):
1349                    raise RuntimeError, "Number of edge elements appears to " \
1350                                        "be less than the number of IFs"
1351                curedge = edge[workscan.getif(r)]
1352
1353            # setup line finder
1354            fl.find_lines(r, mask, curedge)
1355            f.set_scan(workscan, fl.get_mask())
1356            f.x = workscan._getabcissa(r)
1357            f.y = workscan._getspectrum(r)
1358            f.data = None
1359            f.fit()
1360            x = f.get_parameters()
1361            if plot:
1362                f.plot(residual=True)
1363                x = raw_input("Accept fit ( [y]/n ): ")
1364                if x.upper() == 'N':
1365                    continue
1366            workscan._setspectrum(f.fitter.getresidual(), r)
1367        if plot:
1368            f._p.unmap()
1369            f._p = None
1370        workscan._add_history("auto_poly_baseline", varlist)
1371        if insitu:
1372            self._assign(workscan)
1373        else:
1374            return workscan
1375
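    # Illustrative sketch (not part of the class): how the nested 'edge' form
    # might be used for a two-IF scantable; the channel counts are hypothetical.
    #
    #   bl = scan.auto_poly_baseline(edge=((50, 50), (80, 80)), order=3,
    #                                threshold=5, insitu=False)
    #
    # Here IF0 skips 50 channels on each side and IF1 skips 80; because
    # insitu=False the baseline-subtracted result is returned in 'bl'.
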
1376    def rotate_linpolphase(self, angle):
1377        """
1378        Rotate the phase of the complex polarization O=Q+iU correlation.
1379        This is always done in situ in the raw data.  So if you call this
1380        function more than once then each call rotates the phase further.
1381        Parameters:
1382            angle:   The angle (degrees) to rotate (add) by.
1383        Examples:
1384            scan.rotate_linpolphase(2.3)
1385        """
1386        varlist = vars()
1387        self._math._rotate_linpolphase(self, angle)
1388        self._add_history("rotate_linpolphase", varlist)
1389        print_log()
1390        return
1391
1392
1393    def rotate_xyphase(self, angle):
1394        """
1395        Rotate the phase of the XY correlation.  This is always done in situ
1396        in the data.  So if you call this function more than once
1397        then each call rotates the phase further.
1398        Parameters:
1399            angle:   The angle (degrees) to rotate (add) by.
1400        Examples:
1401            scan.rotate_xyphase(2.3)
1402        """
1403        varlist = vars()
1404        self._math._rotate_xyphase(self, angle)
1405        self._add_history("rotate_xyphase", varlist)
1406        print_log()
1407        return
1408
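    # Note (illustrative): rotate_linpolphase and rotate_xyphase both operate
    # in situ on the raw data and accumulate, so repeated calls add up, e.g.
    #
    #   scan.rotate_xyphase(10.0)
    #   scan.rotate_xyphase(10.0)   # net rotation is now 20 degrees
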
1409    def swap_linears(self):
1410        """
1411        Swap the linear polarisations XX and YY, or more generally the first
1412        two polarisations, as this also works for circulars.
1413        """
1414        varlist = vars()
1415        self._math._swap_linears(self)
1416        self._add_history("swap_linears", varlist)
1417        print_log()
1418        return
1419
1420    def invert_phase(self):
1421        """
1422        Invert the phase of the complex polarisation
1423        """
1424        varlist = vars()
1425        self._math._invert_phase(self)
1426        self._add_history("invert_phase", varlist)
1427        print_log()
1428        return
1429
1430    def add(self, offset, insitu=None):
1431        """
1432        Return a scan where all spectra have the offset added
1433        Parameters:
1434            offset:      the offset
1435            insitu:      if False a new scantable is returned.
1436                         Otherwise, the addition is done in-situ
1437                         The default is taken from .asaprc (False)
1438        """
1439        if insitu is None: insitu = rcParams['insitu']
1440        self._math._setinsitu(insitu)
1441        varlist = vars()
1442        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1443        s._add_history("add", varlist)
1444        print_log()
1445        if insitu:
1446            self._assign(s)
1447        else:
1448            return s
1449
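    # Illustrative use of the insitu convention shared by these methods:
    #
    #   shifted = scan.add(1.5, insitu=False)   # a new scantable is returned
    #   scan.add(1.5, insitu=True)              # 'scan' itself is modified and
    #                                           # nothing is returned
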
1450    def scale(self, factor, tsys=True, insitu=None):
1451        """
1452        Return a scan where all spectra are scaled by the given 'factor'
1453        Parameters:
1454            factor:      the scaling factor
1455            insitu:      if False a new scantable is returned.
1456                         Otherwise, the scaling is done in-situ
1457                         The default is taken from .asaprc (False)
1458            tsys:        if True (default) then apply the operation to Tsys
1459                         as well as the data
1460        """
1461        if insitu is None: insitu = rcParams['insitu']
1462        self._math._setinsitu(insitu)
1463        varlist = vars()
1464        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1465        s._add_history("scale", varlist)
1466        print_log()
1467        if insitu:
1468            self._assign(s)
1469        else:
1470            return s
1471
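    # Illustrative sketch: the 'tsys' flag controls whether Tsys is scaled
    # together with the spectra.
    #
    #   cal = scan.scale(1.23, tsys=False, insitu=False)   # data only
    #   scan.scale(1.23)   # data and Tsys; in-situ behaviour follows .asaprc
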
1472    def auto_quotient(self, preserve=True, mode='paired'):
1473        """
1474        This function allows you to build quotients automatically.
1475        It assumes the observation to have the same number of
1476        "ons" and "offs".
1477        Parameters:
1478            preserve:       you can preserve (default) the continuum or
1479                            remove it.  The equations used are
1480                            preserve: Output = Toff * (on/off) - Toff
1481                            remove:   Output = Toff * (on/off) - Ton
1482            mode:           the on/off detection mode
1483                            'paired' (default)
1484                            identifies 'off' scans by the
1485                            trailing '_R' (Mopra/Parkes) or
1486                            '_e'/'_w' (Tid) and matches
1487                            on/off pairs from the observing pattern
1488                            'time'
1489                            finds the closest off in time
1490
1491        """
1492        modes = ["time", "paired"]
1493        if mode not in modes:
1494            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1495            raise ValueError(msg)
1496        varlist = vars()
1497        s = None
1498        if mode.lower() == "paired":
1499            basesel = self.get_selection()
1500            sel = selector()+basesel
1501            sel.set_query("SRCTYPE==1")
1502            self.set_selection(sel)
1503            offs = self.copy()
1504            sel.set_query("SRCTYPE==0")
1505            self.set_selection(sel)
1506            ons = self.copy()
1507            s = scantable(self._math._quotient(ons, offs, preserve))
1508            self.set_selection(basesel)
1509        elif mode.lower() == "time":
1510            s = scantable(self._math._auto_quotient(self, mode, preserve))
1511        s._add_history("auto_quotient", varlist)
1512        print_log()
1513        return s
1514
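    # Illustrative usage of the two detection modes (scan name hypothetical):
    #
    #   q1 = scan.auto_quotient()                 # 'paired': matches on/off
    #                                             # scans from the observing
    #                                             # pattern (e.g. '_R' suffix)
    #   q2 = scan.auto_quotient(mode='time',      # 'time': pairs each 'on'
    #                           preserve=False)   # with the closest 'off'
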
1515    def mx_quotient(self, mask = None, weight='median', preserve=True):
1516        """
1517        Form a quotient using "off" beams when observing in "MX" mode.
1518        Parameters:
1519            mask:           an optional mask to be used when weight == 'stddev'
1520            weight:         How to average the off beams.  Default is 'median'.
1521            preserve:       you can preserve (default) the continuum or
1522                            remove it.  The equations used are
1523                            preserve: Output = Toff * (on/off) - Toff
1524                            remove:   Output = Toff * (on/off) - Ton
1525        """
1526        if mask is None: mask = ()
1527        varlist = vars()
1528        on = scantable(self._math._mx_extract(self, 'on'))
1529        preoff = scantable(self._math._mx_extract(self, 'off'))
1530        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1531        from asapmath import quotient
1532        q = quotient(on, off, preserve)
1533        q._add_history("mx_quotient", varlist)
1534        print_log()
1535        return q
1536
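    # Illustrative sketch: a median-weighted MX quotient, plus a variant with a
    # user mask for 'stddev' weighting (the mask construction shown here is
    # only an assumption, not taken from this file).
    #
    #   q = scan.mx_quotient()                        # median-averaged offs
    #   # msk = scan.create_mask([100, 900])          # hypothetical mask
    #   # q = scan.mx_quotient(mask=msk, weight='stddev')
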
1537    def freq_switch(self, insitu=None):
1538        """
1539        Apply frequency switching to the data.
1540        Parameters:
1541            insitu:      if False a new scantable is returned.
1542                         Otherwise, the switching is done in-situ
1543                         The default is taken from .asaprc (False)
1544        Example:
1545            none
1546        """
1547        if insitu is None: insitu = rcParams['insitu']
1548        self._math._setinsitu(insitu)
1549        varlist = vars()
1550        s = scantable(self._math._freqswitch(self))
1551        s._add_history("freq_switch", varlist)
1552        print_log()
1553        if insitu: self._assign(s)
1554        else: return s
1555
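    # The docstring above gives no example; a minimal sketch might be:
    #
    #   fs = scan.freq_switch(insitu=False)   # frequency-switched result is
    #                                         # returned as a new scantable
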
1556    def recalc_azel(self):
1557        """
1558        Recalculate the azimuth and elevation for each position.
1559        Parameters:
1560            none
1561        Example:
1562        """
1563        varlist = vars()
1564        self._recalcazel()
1565        self._add_history("recalc_azel", varlist)
1566        print_log()
1567        return
1568
1569    def __add__(self, other):
1570        varlist = vars()
1571        s = None
1572        if isinstance(other, scantable):
1573            s = scantable(self._math._binaryop(self, other, "ADD"))
1574        elif isinstance(other, float):
1575            s = scantable(self._math._unaryop(self, other, "ADD", False))
1576        else:
1577            raise TypeError("Other input is not a scantable or float value")
1578        s._add_history("operator +", varlist)
1579        print_log()
1580        return s
1581
1582    def __sub__(self, other):
1583        """
1584        implicit on all axes and on Tsys
1585        """
1586        varlist = vars()
1587        s = None
1588        if isinstance(other, scantable):
1589            s = scantable(self._math._binaryop(self, other, "SUB"))
1590        elif isinstance(other, float):
1591            s = scantable(self._math._unaryop(self, other, "SUB", False))
1592        else:
1593            raise TypeError("Other input is not a scantable or float value")
1594        s._add_history("operator -", varlist)
1595        print_log()
1596        return s
1597
1598    def __mul__(self, other):
1599        """
1600        implicit on all axes and on Tsys
1601        """
1602        varlist = vars()
1603        s = None
1604        if isinstance(other, scantable):
1605            s = scantable(self._math._binaryop(self, other, "MUL"))
1606        elif isinstance(other, float):
1607            s = scantable(self._math._unaryop(self, other, "MUL", False))
1608        else:
1609            raise TypeError("Other input is not a scantable or float value")
1610        s._add_history("operator *", varlist)
1611        print_log()
1612        return s
1613
1614
1615    def __div__(self, other):
1616        """
1617        implicit on all axes and on Tsys
1618        """
1619        varlist = vars()
1620        s = None
1621        if isinstance(other, scantable):
1622            s = scantable(self._math._binaryop(self, other, "DIV"))
1623        elif isinstance(other, float):
1624            if other == 0.0:
1625                raise ZeroDivisionError("Dividing by zero is not recommended")
1626            s = scantable(self._math._unaryop(self, other, "DIV", False))
1627        else:
1628            raise TypeError("Other input is not a scantable or float value")
1629        s._add_history("operator /", varlist)
1630        print_log()
1631        return s
1632
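    # Illustrative scantable arithmetic using the operators above; note that
    # scalar operands must be Python floats (a plain int raises TypeError).
    #
    #   diff   = scan_on - scan_off     # scantable - scantable, per channel
    #   scaled = scan * 2.0             # float scalar works ...
    #   # bad  = scan * 2               # ... an int would raise TypeError
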
1633    def get_fit(self, row=0):
1634        """
1635        Print or return the stored fits for a row in the scantable
1636        Parameters:
1637            row:    the row which the fit has been applied to.
1638        """
1639        if row >= self.nrow():
1640            return
1641        from asap.asapfit import asapfit
1642        fit = asapfit(self._getfit(row))
1643        if rcParams['verbose']:
1644            print fit
1645            return
1646        else:
1647            return fit.as_dict()
1648
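    # Illustrative: with rcParams['verbose'] set, the stored fit for a row is
    # simply printed; otherwise it is returned as a dictionary.
    #
    #   scan.get_fit(0)              # verbose: prints the fit
    #   pars = scan.get_fit(row=0)   # non-verbose: fit returned as a dict
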
1649    def _add_history(self, funcname, parameters):
1650        # create date
1651        sep = "##"
1652        from datetime import datetime
1653        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1654        hist = dstr+sep
1655        hist += funcname+sep
1656        if parameters.has_key('self'): del parameters['self']
1657        for k, v in parameters.iteritems():
1658            if type(v) is dict:
1659                for k2, v2 in v.iteritems():
1660                    hist += k2
1661                    hist += "="
1662                    if isinstance(v2, scantable):
1663                        hist += 'scantable'
1664                    elif k2 == 'mask':
1665                        if isinstance(v2, list) or isinstance(v2, tuple):
1666                            hist += str(self._zip_mask(v2))
1667                        else:
1668                            hist += str(v2)
1669                    else:
1670                        hist += str(v2)
1671            else:
1672                hist += k
1673                hist += "="
1674                if isinstance(v, scantable):
1675                    hist += 'scantable'
1676                elif k == 'mask':
1677                    if isinstance(v, list) or isinstance(v, tuple):
1678                        hist += str(self._zip_mask(v))
1679                    else:
1680                        hist += str(v)
1681                else:
1682                    hist += str(v)
1683            hist += sep
1684        hist = hist[:-2] # remove trailing '##'
1685        self._addhistory(hist)
1686
1687
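    # Sketch of the history string produced above: fields are joined with the
    # '##' separator, e.g. (values hypothetical)
    #
    #   2007/01/01 12:00:00##scale##factor=2.0##tsys=True##insitu=False
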
1688    def _zip_mask(self, mask):
1689        mask = list(mask)
1690        i = 0
1691        segments = []
1692        while mask[i:].count(1):
1693            i += mask[i:].index(1)
1694            if mask[i:].count(0):
1695                j = i + mask[i:].index(0)
1696            else:
1697                j = len(mask)
1698            segments.append([i, j])
1699            i = j
1700        return segments
1701
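    # Worked example of _zip_mask: a channel mask is compressed into
    # [start, end) index pairs, e.g.
    #
    #   _zip_mask([0, 1, 1, 1, 0, 0, 1, 1])  ->  [[1, 4], [6, 8]]
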
1702    def _get_ordinate_label(self):
1703        fu = "("+self.get_fluxunit()+")"
1704        import re
1705        lbl = "Intensity"
1706        if re.match(".K.", fu):
1707            lbl = "Brightness Temperature "+ fu
1708        elif re.match(".Jy.", fu):
1709            lbl = "Flux density "+ fu
1710        return lbl
1711
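    # Example of the labels produced above (flux unit taken from the data):
    # a 'K' unit gives "Brightness Temperature (K)", 'Jy' gives
    # "Flux density (Jy)", and anything else falls back to "Intensity".
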
1712    def _check_ifs(self):
1713        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1714        nchans = filter(lambda t: t > 0, nchans)
1715        return (len(filter(lambda t: t != nchans[0], nchans)) == 0)
1716
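    # _check_ifs returns True only when every (non-empty) IF has the same
    # number of channels, e.g. nchans [1024, 1024] -> True, [1024, 512] -> False.
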
1717    def _fill(self, names, unit, average):
1718        import os
1719        from asap._asap import stfiller
1720        first = True
1721        fullnames = []
1722        for name in names:
1723            name = os.path.expandvars(name)
1724            name = os.path.expanduser(name)
1725            if not os.path.exists(name):
1726                msg = "File '%s' does not exist" % (name)
1727                if rcParams['verbose']:
1728                    asaplog.push(msg)
1729                    print asaplog.pop().strip()
1730                    return
1731                raise IOError(msg)
1732            fullnames.append(name)
1733        if average:
1734            asaplog.push('Auto averaging integrations')
1735        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1736        for name in fullnames:
1737            tbl = Scantable(stype)
1738            r = stfiller(tbl)
1739            msg = "Importing %s..." % (name)
1740            asaplog.push(msg, False)
1741            print_log()
1742            r._open(name, -1, -1)
1743            r._read()
1744            #tbl = r._getdata()
1745            if average:
1746                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1747                #tbl = tbl2
1748            if not first:
1749                tbl = self._math._merge([self, tbl])
1750                #tbl = tbl2
1751            Scantable.__init__(self, tbl)
1752            r._close()
1753            del r, tbl
1754            first = False
1755        if unit is not None:
1756            self.set_fluxunit(unit)
1757        self.set_freqframe(rcParams['scantable.freqframe'])
1758
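    # _fill is normally invoked via the constructor; a direct call might look
    # like this (file names hypothetical):
    #
    #   scan._fill(['obs1.rpf', 'obs2.rpf'], unit='K', average=True)
    #
    # Each file is imported with stfiller, optionally scan-averaged, and merged
    # into the current scantable.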