source: trunk/python/scantable.py @ 1356

Last change on this file since 1356 was 1356, checked in by mar637, 17 years ago

fix to auto_quotient; the selector add_ operator ANDs the query, which is not wanted in this case

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.1 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32        """
33        if average is None:
34            average = rcParams['scantable.autoaverage']
35        varlist = vars()
36        from asap._asap import stmath
37        self._math = stmath()
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40        else:
41            if isinstance(filename, str):
42                import os.path
43                filename = os.path.expandvars(filename)
44                filename = os.path.expanduser(filename)
45                if not os.path.exists(filename):
46                    s = "File '%s' not found." % (filename)
47                    if rcParams['verbose']:
48                        asaplog.push(s)
49                        print asaplog.pop().strip()
50                        return
51                    raise IOError(s)
52                if os.path.isdir(filename) \
53                    and not os.path.exists(filename+'/table.f1'):
54                    # crude check if asap table
55                    if os.path.exists(filename+'/table.info'):
56                        ondisk = rcParams['scantable.storage'] == 'disk'
57                        Scantable.__init__(self, filename, ondisk)
58                        if unit is not None:
59                            self.set_fluxunit(unit)
60                        self.set_freqframe(rcParams['scantable.freqframe'])
61                    else:
62                        msg = "The given file '%s' is not a valid " \
63                              "asap table." % (filename)
64                        if rcParams['verbose']:
65                            print msg
66                            return
67                        else:
68                            raise IOError(msg)
69                else:
70                    self._fill([filename], unit, average)
71            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72                  and isinstance(filename[-1], str):
73                self._fill(filename, unit, average)
74        self._add_history("scantable", varlist)
75        print_log()
76
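    # Illustrative constructor usage (kept as comments; 'uranus.rpf' is a
    # hypothetical input file and the import path assumes the standard asap package):
    #     from asap import scantable
    #     s = scantable('uranus.rpf', average=True, unit='Jy')  # read file, auto-average integrations
    #     ref = scantable(s)                                    # [advanced] reference an existing scantable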
77    def save(self, name=None, format=None, overwrite=False):
78        """
79        Store the scantable on disk. This can be an asap (aips++) Table,
80        SDFITS or MS2 format.
81        Parameters:
82            name:        the name of the output file. For format "ASCII"
83                         this is the root file name (data in 'name'.txt
84                         and header in 'name'_header.txt)
85            format:      an optional file format. Default is ASAP.
86                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87                                       'SDFITS' (save as SDFITS file),
88                                       'ASCII' (saves as ascii text file),
89                                       'MS2' (saves as an aips++
90                                              MeasurementSet V2)
91            overwrite:   whether to overwrite the file if it exists.
92                         The default (False) is to return with a warning
93                         without writing the output. USE WITH CARE.
94        Example:
95            scan.save('myscan.asap')
96            scan.save('myscan.sdfits', 'SDFITS')
97        """
98        from os import path
99        if format is None: format = rcParams['scantable.save']
100        suffix = '.'+format.lower()
101        if name is None or name == "":
102            name = 'scantable'+suffix
103            msg = "No filename given. Using default name %s..." % name
104            asaplog.push(msg)
105        name = path.expandvars(name)
106        if path.isfile(name) or path.isdir(name):
107            if not overwrite:
108                msg = "File %s exists." % name
109                if rcParams['verbose']:
110                    print msg
111                    return
112                else:
113                    raise IOError(msg)
114        format2 = format.upper()
115        if format2 == 'ASAP':
116            self._save(name)
117        else:
118            from asap._asap import stwriter as stw
119            writer = stw(format2)
120            writer.write(self, name)
121        print_log()
122        return
123
124    def copy(self):
125        """
126        Return a copy of this scantable.
127        Note:
128            This makes a full (deep) copy. scan2 = scan1 makes a reference.
129        Parameters:
130            none
131        Example:
132            copiedscan = scan.copy()
133        """
134        sd = scantable(Scantable._copy(self))
135        return sd
136
137    def drop_scan(self, scanid=None):
138        """
139        Return a new scantable where the specified scan number(s) have
140        been dropped.
141        Parameters:
142            scanid:    a (list of) scan number(s)
143        """
144        from asap import _is_sequence_or_number as _is_valid
145        from asap import _to_list
146        from asap import unique
147        if not _is_valid(scanid):
148            if rcParams['verbose']:
149                print "Please specify a scanno to drop from the scantable"
150                return
151            else:
152                raise RuntimeError("No scan given")
153        try:
154            scanid = _to_list(scanid)
155            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156            for sid in scanid: allscans.remove(sid)
157            if len(allscans) == 0:
158                raise ValueError("Can't remove all scans")
159        except ValueError:
160            if rcParams['verbose']:
161                print "Couldn't find any match."
162                return
163            else: raise
164        try:
165            bsel = self.get_selection()
166            sel = selector()
167            sel.set_scans(allscans)
168            self.set_selection(bsel+sel)
169            scopy = self._copy()
170            self.set_selection(bsel)
171            return scantable(scopy)
172        except RuntimeError:
173            if rcParams['verbose']:
174                print "Couldn't find any match."
175            else:
176                raise
177
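    # Illustrative usage of drop_scan (comments only; scan numbers are arbitrary):
    #     cleaned = scan.drop_scan(3)          # drop scan 3
    #     cleaned = scan.drop_scan([3, 5, 7])  # drop several scans at once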
178
179    def get_scan(self, scanid=None):
180        """
181        Return a specific scan (by scanno) or collection of scans (by
182        source name) in a new scantable.
183        Note:
184            See scantable.drop_scan() for the inverse operation.
185        Parameters:
186            scanid:    a (list of) scanno or a source name, unix-style
187                       patterns are accepted for source name matching, e.g.
188                       '*_R' gets all 'ref' scans
189        Example:
190            # get all scans containing the source '323p459'
191            newscan = scan.get_scan('323p459')
192            # get all 'off' scans
193            refscans = scan.get_scan('*_R')
194            # get a subset of scans by scanno (as listed in scan.summary())
195            newscan = scan.get_scan([0, 2, 7, 10])
196        """
197        if scanid is None:
198            if rcParams['verbose']:
199                print "Please specify a scan no or name to " \
200                      "retrieve from the scantable"
201                return
202            else:
203                raise RuntimeError("No scan given")
204
205        try:
206            bsel = self.get_selection()
207            sel = selector()
208            if type(scanid) is str:
209                sel.set_name(scanid)
210                self.set_selection(bsel+sel)
211                scopy = self._copy()
212                self.set_selection(bsel)
213                return scantable(scopy)
214            elif type(scanid) is int:
215                sel.set_scans([scanid])
216                self.set_selection(bsel+sel)
217                scopy = self._copy()
218                self.set_selection(bsel)
219                return scantable(scopy)
220            elif type(scanid) is list:
221                sel.set_scans(scanid)
222                self.set_selection(sel)
223                scopy = self._copy()
224                self.set_selection(bsel)
225                return scantable(scopy)
226            else:
227                msg = "Illegal scanid type, use 'int' or 'list' of ints."
228                if rcParams['verbose']:
229                    print msg
230                else:
231                    raise TypeError(msg)
232        except RuntimeError:
233            if rcParams['verbose']: print "Couldn't find any match."
234            else: raise
235
236    def __str__(self):
237        return Scantable._summary(self, True)
238
239    def summary(self, filename=None):
240        """
241        Print a summary of the contents of this scantable.
242        Parameters:
243            filename:    the name of a file to write the output to
244                         Default - no file output
245            verbose:     print extra info such as the frequency table
246                         The default (False) is taken from .asaprc
247        """
248        info = Scantable._summary(self, True)
249        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250        if filename is not None:
251            if filename == "":
252                filename = 'scantable_summary.txt'
253            from os.path import expandvars, isdir
254            filename = expandvars(filename)
255            if not isdir(filename):
256                data = open(filename, 'w')
257                data.write(info)
258                data.close()
259            else:
260                msg = "Illegal file name '%s'." % (filename)
261                if rcParams['verbose']:
262                    print msg
263                else:
264                    raise IOError(msg)
265        if rcParams['verbose']:
266            try:
267                from IPython.genutils import page as pager
268            except ImportError:
269                from pydoc import pager
270            pager(info)
271        else:
272            return info
273
274
275    def get_selection(self):
276        """
277        Get the selection object currently set on this scantable.
278        Parameters:
279            none
280        Example:
281            sel = scan.get_selection()
282            sel.set_ifs(0)              # select IF 0
283            scan.set_selection(sel)     # apply modified selection
284        """
285        return selector(self._getselection())
286
287    def set_selection(self, selection=selector()):
288        """
289        Select a subset of the data. All following operations on this scantable
290        are only applied to this selection.
291        Parameters:
292            selection:    a selector object (default unset the selection)
293        Examples:
294            sel = selector()         # create a selection object
295            sel.set_scans([0, 3])     # select SCANNO 0 and 3
296            scan.set_selection(sel)  # set the selection
297            scan.summary()           # will only print summary of scannos 0 and 3
298            scan.set_selection()     # unset the selection
299        """
300        self._setselection(selection)
301
302    def stats(self, stat='stddev', mask=None):
303        """
304        Determine the specified statistic of the current beam/if/pol
305        Takes a 'mask' as an optional parameter to specify which
306        channels should be excluded.
307        Parameters:
308            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
309                     'var', 'stddev', 'avdev', 'rms', 'median'
310            mask:    an optional mask specifying where the statistic
311                     should be determined.
312        Example:
313            scan.set_unit('channel')
314            msk = scan.create_mask([100, 200], [500, 600])
315            scan.stats(stat='mean', mask=msk)
316        """
317        if mask is None:
318            mask = []
319        axes = ['Beam', 'IF', 'Pol', 'Time']
320        if not self._check_ifs():
321            raise ValueError("Cannot apply mask as the IFs have different "
322                             "number of channels. Please use set_selection() "
323                             "to select individual IFs")
324
325        statvals = self._math._stats(self, mask, stat)
326        out = ''
327        axes = []
328        for i in range(self.nrow()):
329            axis = []
330            axis.append(self.getscan(i))
331            axis.append(self.getbeam(i))
332            axis.append(self.getif(i))
333            axis.append(self.getpol(i))
334            axis.append(self.getcycle(i))
335            axes.append(axis)
336            tm = self._gettime(i)
337            src = self._getsourcename(i)
338            out += 'Scan[%d] (%s) ' % (axis[0], src)
339            out += 'Time[%s]:\n' % (tm)
340            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
341            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
342            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
343            out += '= %3.3f\n' % (statvals[i])
344            out +=  "--------------------------------------------------\n"
345
346        if rcParams['verbose']:
347            print "--------------------------------------------------"
348            print " ", stat
349            print "--------------------------------------------------"
350            print out
351        #else:
352            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353            #           'axes' : axes,
354            #           'data': statvals}
355        return statvals
356
357    def stddev(self, mask=None):
358        """
359        Determine the standard deviation of the current beam/if/pol
360        Takes a 'mask' as an optional parameter to specify which
361        channels should be excluded.
362        Parameters:
363            mask:    an optional mask specifying where the standard
364                     deviation should be determined.
365
366        Example:
367            scan.set_unit('channel')
368            msk = scan.create_mask([100, 200], [500, 600])
369            scan.stddev(mask=msk)
370        """
371        return self.stats(stat='stddev', mask=mask)
372
373
374    def get_column_names(self):
375        """
376        Return a list of column names, which can be used for selection.
377        """
378        return list(Scantable.get_column_names(self))
379
380    def get_tsys(self):
381        """
382        Return the System temperatures.
383        Returns:
384            a list of Tsys values for the current selection
385        """
386
387        return self._row_callback(self._gettsys, "Tsys")
388
389    def _row_callback(self, callback, label):
390        axes = []
391        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
392        out = ""
393        outvec = []
394        for i in range(self.nrow()):
395            axis = []
396            axis.append(self.getscan(i))
397            axis.append(self.getbeam(i))
398            axis.append(self.getif(i))
399            axis.append(self.getpol(i))
400            axis.append(self.getcycle(i))
401            axes.append(axis)
402            tm = self._gettime(i)
403            src = self._getsourcename(i)
404            out += 'Scan[%d] (%s) ' % (axis[0], src)
405            out += 'Time[%s]:\n' % (tm)
406            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
407            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
408            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
409            outvec.append(callback(i))
410            out += '= %3.3f\n' % (outvec[i])
411            out +=  "--------------------------------------------------\n"
412        if rcParams['verbose']:
413            print "--------------------------------------------------"
414            print " %s" % (label)
415            print "--------------------------------------------------"
416            print out
417        # disabled because the vector seems more useful
418        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419        return outvec
420
421    def _get_column(self, callback, row=-1):
422        """
423        """
424        if row == -1:
425            return [callback(i) for i in range(self.nrow())]
426        else:
427            if  0 <= row < self.nrow():
428                return callback(row)
429
430
431    def get_time(self, row=-1, asdatetime=False):
432        """
433        Get a list of time stamps for the observations.
434        Return a time string for each integration time stamp in the scantable, or a datetime object if asdatetime is True.
435        Parameters:
436            row:          row no of integration. Default -1 return all rows
437            asdatetime:   return values as datetime objects rather than strings
438        Example:
439            none
440        """
441        from time import strptime
442        from datetime import datetime
443        times = self._get_column(self._gettime, row)
444        if not asdatetime:
445            return times
446        format = "%Y/%m/%d/%H:%M:%S"
447        if isinstance(times, list):
448            return [datetime(*strptime(i, format)[:6]) for i in times]
449        return datetime(*strptime(times, format)[:6])
450
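    # Illustrative usage of get_time (comments only):
    #     stamps = scan.get_time()                        # time strings for all rows
    #     first = scan.get_time(row=0, asdatetime=True)   # first row as a datetime object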
451
452    def get_inttime(self, row=-1):
453        """
454        Get a list of integration times for the observations.
455        Return a time in seconds for each integration in the scantable.
456        Parameters:
457            row:    row no of integration. Default -1 return all rows.
458        Example:
459            none
460        """
461        return self._get_column(self._getinttime, row)       
462       
463
464    def get_sourcename(self, row=-1):
465        """
466        Get a list of source names for the observations.
467        Return a string for each integration in the scantable.
468        Parameters:
469            row:    row no of integration. Default -1 return all rows.
470        Example:
471            none
472        """
473        return self._get_column(self._getsourcename, row)
474
475    def get_elevation(self, row=-1):
476        """
477        Get a list of elevations for the observations.
478        Return a float for each integration in the scantable.
479        Parameters:
480            row:    row no of integration. Default -1 return all rows.
481        Example:
482            none
483        """
484        return self._get_column(self._getelevation, row)
485
486    def get_azimuth(self, row=-1):
487        """
488        Get a list of azimuths for the observations.
489        Return a float for each integration in the scantable.
490        Parameters:
491            row:    row no of integration. Default -1 return all rows.
492        Example:
493            none
494        """
495        return self._get_column(self._getazimuth, row)
496
497    def get_parangle(self, row=-1):
498        """
499        Get a list of parallactic angles for the observations.
500        Return a float for each integration in the scantable.
501        Parameters:
502            row:    row no of integration. Default -1 return all rows.
503        Example:
504            none
505        """
506        return self._get_column(self._getparangle, row)
507
508    def get_direction(self, row=-1):
509        """
510        Get a list of Positions on the sky (direction) for the observations.
511        Return the direction for each integration in the scantable.
512        Parameters:
513            row:    row no of integration. Default -1 return all rows
514        Example:
515            none
516        """
517        return self._get_column(self._getdirection, row)
518
519    def set_unit(self, unit='channel'):
520        """
521        Set the unit for all following operations on this scantable
522        Parameters:
523            unit:    optional unit, default is 'channel'
524                     one of '*Hz', 'km/s', 'channel', ''
525        """
526        varlist = vars()
527        if unit in ['', 'pixel', 'channel']:
528            unit = ''
529        inf = list(self._getcoordinfo())
530        inf[0] = unit
531        self._setcoordinfo(inf)
532        self._add_history("set_unit", varlist)
533
534    def set_instrument(self, instr):
535        """
536        Set the instrument for subsequent processing.
537        Parameters:
538            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
539                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
540        """
541        self._setInstrument(instr)
542        self._add_history("set_instument", vars())
543        print_log()
544
545    def set_feedtype(self, feedtype):
546        """
547        Overwrite the feed type, which might not be set correctly.
548        Parameters:
549            feedtype:     'linear' or 'circular'
550        """
551        self._setfeedtype(feedtype)
552        self._add_history("set_feedtype", vars())
553        print_log()
554
555    def set_doppler(self, doppler='RADIO'):
556        """
557        Set the doppler for all following operations on this scantable.
558        Parameters:
559            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
560        """
561        varlist = vars()
562        inf = list(self._getcoordinfo())
563        inf[2] = doppler
564        self._setcoordinfo(inf)
565        self._add_history("set_doppler", vars())
566        print_log()
567
568    def set_freqframe(self, frame=None):
569        """
570        Set the frame type of the Spectral Axis.
571        Parameters:
572            frame:   an optional frame type, default 'LSRK'. Valid frames are:
573                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
574                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
575        Examples:
576            scan.set_freqframe('BARY')
577        """
578        if frame is None: frame = rcParams['scantable.freqframe']
579        varlist = vars()
580        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
581                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
582
583        if frame in valid:
584            inf = list(self._getcoordinfo())
585            inf[1] = frame
586            self._setcoordinfo(inf)
587            self._add_history("set_freqframe", varlist)
588        else:
589            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
590            if rcParams['verbose']:
591                print msg
592            else:
593                raise TypeError(msg)
594        print_log()
595
596    def set_dirframe(self, frame=""):
597        """
598        Set the frame type of the Direction on the sky.
599        Parameters:
600            frame:   an optional frame type, default ''. Valid frames are:
601                     'J2000', 'B1950', 'GALACTIC'
602        Examples:
603            scan.set_dirframe('GALACTIC')
604        """
605        varlist = vars()
606        try:
607            Scantable.set_dirframe(self, frame)
608        except RuntimeError, msg:
609            if rcParams['verbose']:
610                print msg
611            else:
612                raise
613        self._add_history("set_dirframe", varlist)
614
615    def get_unit(self):
616        """
617        Get the default unit set in this scantable
618        Returns:
619            A unit string
620        """
621        inf = self._getcoordinfo()
622        unit = inf[0]
623        if unit == '': unit = 'channel'
624        return unit
625
626    def get_abcissa(self, rowno=0):
627        """
628        Get the abcissa in the current coordinate setup for the currently
629        selected Beam/IF/Pol
630        Parameters:
631            rowno:    an optional row number in the scantable. Default is the
632                      first row, i.e. rowno=0
633        Returns:
634            The abcissa values and the format string (as a dictionary)
635        """
636        abc = self._getabcissa(rowno)
637        lbl = self._getabcissalabel(rowno)
638        print_log()
639        return abc, lbl
640
641    def flag(self, mask=None):
642        """
643        Flag the selected data using an optional channel mask.
644        Parameters:
645            mask:   an optional channel mask, created with create_mask. Default
646                    (no mask) is all channels.
647        """
648        varlist = vars()
649        if mask is None:
650            mask = []
651        try:
652            self._flag(mask)
653        except RuntimeError, msg:
654            if rcParams['verbose']:
655                print msg
656                return
657            else: raise
658        self._add_history("flag", varlist)
659
660    def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
661        """
662        Flag the data in 'lag' space by providing a frequency to remove.
663        Flagged data in the scantable gets set to 0.0 before the fft.
664        No taper is applied.
665        Parameters:
666            frequency:    the frequency (really a period within the bandwidth)
667                          to remove
668            width:        the width of the frequency to remove, to remove a
669                          range of frequencies around the centre.
670            unit:         the frequency unit (default "GHz")
671        Notes:
672            It is recommended to flag edges of the band or strong
673            signals beforehand.
674        """
675        if insitu is None: insitu = rcParams['insitu']
676        self._math._setinsitu(insitu)
677        varlist = vars()
678        base = { "GHz": 100000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
679        if not base.has_key(unit):
680            raise ValueError("%s is not a valid unit." % unit)
681        try:
682            s = scantable(self._math._lag_flag(self, frequency*base[unit],
683                                               width*base[unit]))
684        except RuntimeError, msg:
685            if rcParams['verbose']:
686                print msg
687                return
688            else: raise
689        s._add_history("lag_flag", varlist)
690        print_log()
691        if insitu:
692            self._assign(s)
693        else:
694            return s
695
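    # Illustrative usage of lag_flag (comments only; the values below are
    # placeholders for a real standing-wave period and width):
    #     cleaned = scan.lag_flag(frequency=0.5, width=0.05, unit="GHz", insitu=False)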
696
697    def create_mask(self, *args, **kwargs):
698        """
699        Compute and return a mask based on [min, max] windows.
700        The specified windows are to be INCLUDED, when the mask is
701        applied.
702        Parameters:
703            [min, max], [min2, max2], ...
704                Pairs of start/end points (inclusive) specifying the regions
705                to be masked
706            invert:     optional argument. If specified as True,
707                        return an inverted mask, i.e. the regions
708                        specified are EXCLUDED
709            row:        create the mask using the specified row for
710                        unit conversions, default is row=0
711                        only necessary if frequency varies over rows.
712        Example:
713            scan.set_unit('channel')
714            a)
715            msk = scan.create_mask([400, 500], [800, 900])
716            # masks everything outside 400 and 500
717            # and 800 and 900 in the unit 'channel'
718
719            b)
720            msk = scan.create_mask([400, 500], [800, 900], invert=True)
721            # masks the regions between 400 and 500
722            # and 800 and 900 in the unit 'channel'
723            c)
724            # mask only channel 400
725            msk = scan.create_mask([400, 400])
726        """
727        row = 0
728        if kwargs.has_key("row"):
729            row = kwargs.get("row")
730        data = self._getabcissa(row)
731        u = self._getcoordinfo()[0]
732        if rcParams['verbose']:
733            if u == "": u = "channel"
734            msg = "The current mask window unit is %s" % u
735            i = self._check_ifs()
736            if not i:
737                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
738            asaplog.push(msg)
739        n = self.nchan()
740        msk = _n_bools(n, False)
741        # test if args is a 'list' or a 'normal *args - UGLY!!!
742
743        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
744             and args or args[0]
745        for window in ws:
746            if (len(window) != 2 or window[0] > window[1] ):
747                raise TypeError("A window needs to be defined as [min, max]")
748            for i in range(n):
749                if data[i] >= window[0] and data[i] <= window[1]:
750                    msk[i] = True
751        if kwargs.has_key('invert'):
752            if kwargs.get('invert'):
753                msk = mask_not(msk)
754        print_log()
755        return msk
756
757    def get_restfreqs(self):
758        """
759        Get the restfrequency(s) stored in this scantable.
760        The return value(s) are always of unit 'Hz'
761        Parameters:
762            none
763        Returns:
764            a list of doubles
765        """
766        return list(self._getrestfreqs())
767
768
769    def set_restfreqs(self, freqs=None, unit='Hz'):
770        """
771        Set or replace the restfrequency specified.
772        If the 'freqs' argument holds a scalar,
773        then that rest frequency will be applied to all the selected
774        data.  If the 'freqs' argument holds
775        a vector, then it MUST be of equal or smaller length than
776        the number of IFs (and the available restfrequencies will be
777        replaced by this vector).  In this case, *all* data have
778        the restfrequency set per IF according
779        to the corresponding value you give in the 'freqs' vector.
780        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
781        IF 1 gets restfreq 2e9.
782        You can also specify the frequencies via a linecatalog.
783
784        Parameters:
785            freqs:   list of rest frequency values or string identifiers
786            unit:    unit for rest frequency (default 'Hz')
787
788        Example:
789            # set the given restfrequency for the whole table
790            scan.set_restfreqs(freqs=1.4e9)
791            # If the number of IFs in the data is >= 2, IF0 gets the first
792            # value IF1 the second...
793            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
794            #set the given restfrequency for the whole table (by name)
795            scan.set_restfreqs(freqs="OH1667")
796
797        Note:
798            To do more sophisticated rest-frequency setting, e.g. on a
799            source and IF basis, use scantable.set_selection() before using
800            this function.
801            # provided your scantable is called scan
802            selection = selector()
803            selection.set_name("ORION*")
804            selection.set_ifs([1])
805            scan.set_selection(selection)
806            scan.set_restfreqs(freqs=86.6e9)
807
808        """
809        varlist = vars()
810        from asap import linecatalog
811        # simple  value
812        if isinstance(freqs, int) or isinstance(freqs, float):
813            self._setrestfreqs(freqs, "",unit)
814        # list of values
815        elif isinstance(freqs, list) or isinstance(freqs, tuple):
816            # list values are scalars
817            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
818                sel = selector()
819                savesel = self._getselection()
820                iflist = self.getifnos()
821                for i in xrange(len(freqs)):
822                    sel.set_ifs(iflist[i])
823                    self._setselection(sel)
824                    self._setrestfreqs(freqs[i], "",unit)
825                self._setselection(savesel)
826            # list values are dicts, e.g. {'value': ..., 'name': ...}
827            elif isinstance(freqs[-1], dict):
828                sel = selector()
829                savesel = self._getselection()
830                iflist = self.getifnos()
831                for i in xrange(len(freqs)):
832                    sel.set_ifs(iflist[i])
833                    self._setselection(sel)
834                    self._setrestfreqs(freqs[i]["value"],
835                                       freqs[i]["name"], "MHz")
836                self._setselection(savesel)
837        # freqs are to be taken from a linecatalog
838        elif isinstance(freqs, linecatalog):
839            sel = selector()
840            savesel = self._getselection()
841            iflist = self.getifnos()
842            for i in xrange(freqs.nrow()):
843                sel.set_ifs(iflist[i])
844                self._setselection(sel)
845                self._setrestfreqs(freqs.get_frequency(i),
846                                   freqs.get_name(i), "MHz")
847                # ensure that we are not iterating past nIF
848                if i == self.nif()-1: break
849            self._setselection(savesel)
850        else:
851            return
852        self._add_history("set_restfreqs", varlist)
853
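    # Illustrative usage of set_restfreqs (comments only; frequencies are example values):
    #     scan.set_restfreqs(freqs=1.4204e9)              # one value for all selected data
    #     scan.set_restfreqs(freqs=[1.6654e9, 1.6673e9])  # one value per IF
    #     scan.set_restfreqs(freqs=[{'value': 1667.359, 'name': 'OH1667'}])  # dict form, value in MHz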
854
855
856    def history(self, filename=None):
857        """
858        Print the history. Optionally to a file.
859        Parameters:
860            filename:    The name of the file to save the history to.
861        """
862        hist = list(self._gethistory())
863        out = "-"*80
864        for h in hist:
865            if h.startswith("---"):
866                out += "\n"+h
867            else:
868                items = h.split("##")
869                date = items[0]
870                func = items[1]
871                items = items[2:]
872                out += "\n"+date+"\n"
873                out += "Function: %s\n  Parameters:" % (func)
874                for i in items:
875                    s = i.split("=")
876                    out += "\n   %s = %s" % (s[0], s[1])
877                out += "\n"+"-"*80
878        if filename is not None:
879            if filename == "":
880                filename = 'scantable_history.txt'
881            import os
882            filename = os.path.expandvars(os.path.expanduser(filename))
883            if not os.path.isdir(filename):
884                data = open(filename, 'w')
885                data.write(out)
886                data.close()
887            else:
888                msg = "Illegal file name '%s'." % (filename)
889                if rcParams['verbose']:
890                    print msg
891                else:
892                    raise IOError(msg)
893        if rcParams['verbose']:
894            try:
895                from IPython.genutils import page as pager
896            except ImportError:
897                from pydoc import pager
898            pager(out)
899        else:
900            return out
901        return
902    #
903    # Maths business
904    #
905
906    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
907        """
908        Return the (time) weighted average of a scan.
909        Note:
910            in channels only - align if necessary
911        Parameters:
912            mask:     an optional mask (only used for 'var' and 'tsys'
913                      weighting)
914            scanav:   True averages each scan separately
915                      False (default) averages all scans together,
916            weight:   Weighting scheme.
917                      'none'     (mean no weight)
918                      'var'      (1/var(spec) weighted)
919                      'tsys'     (1/Tsys**2 weighted)
920                      'tint'     (integration time weighted)
921                      'tintsys'  (Tint/Tsys**2)
922                      'median'   (median averaging)
923                      The default is 'tint'
924            align:    align the spectra in velocity before averaging. It takes
925                      the time of the first spectrum as reference time.
926        Example:
927            # time average the scantable without using a mask
928            newscan = scan.average_time()
929        """
930        varlist = vars()
931        if weight is None: weight = 'TINT'
932        if mask is None: mask = ()
933        if scanav: scanav = "SCAN"
934        else: scanav = "NONE"
935        scan = (self, )
936        try:
937            if align:
938                scan = (self.freq_align(insitu=False), )
939            s = None
940            if weight.upper() == 'MEDIAN':
941                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
942                                                         scanav))
943            else:
944                s = scantable(self._math._average(scan, mask, weight.upper(),
945                              scanav))
946        except RuntimeError, msg:
947            if rcParams['verbose']:
948                print msg
949                return
950            else: raise
951        s._add_history("average_time", varlist)
952        print_log()
953        return s
954
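    # Illustrative usage of average_time (comments only):
    #     av = scan.average_time(scanav=True)        # average within each scan separately
    #     med = scan.average_time(weight='median')   # median average over all scans
    #     al = scan.average_time(align=True)         # align in velocity before averaging
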
955    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
956        """
957        Return a scan where all spectra are converted to either
958        Jansky or Kelvin depending upon the flux units of the scan table.
959        By default the function tries to look the values up internally.
960        If it can't find them (or if you want to over-ride), you must
961        specify EITHER jyperk OR eta (and D which it will try to look up
962        also if you don't set it). jyperk takes precedence if you set both.
963        Parameters:
964            jyperk:      the Jy / K conversion factor
965            eta:         the aperture efficiency
966            d:           the geometric diameter (metres)
967            insitu:      if False a new scantable is returned.
968                         Otherwise, the scaling is done in-situ
969                         The default is taken from .asaprc (False)
970        """
971        if insitu is None: insitu = rcParams['insitu']
972        self._math._setinsitu(insitu)
973        varlist = vars()
974        if jyperk is None: jyperk = -1.0
975        if d is None: d = -1.0
976        if eta is None: eta = -1.0
977        s = scantable(self._math._convertflux(self, d, eta, jyperk))
978        s._add_history("convert_flux", varlist)
979        print_log()
980        if insitu: self._assign(s)
981        else: return s
982
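    # Illustrative usage of convert_flux (comments only; the factor, efficiency and
    # diameter below are placeholders):
    #     jy = scan.convert_flux(jyperk=12.5, insitu=False)       # explicit Jy/K factor
    #     jy = scan.convert_flux(eta=0.58, d=22.0, insitu=False)  # from efficiency and dish diameter
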
983    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
984        """
985        Return a scan after applying a gain-elevation correction.
986        The correction can be made via either a polynomial or a
987        table-based interpolation (and extrapolation if necessary).
988        You specify polynomial coefficients, an ascii table or neither.
989        If you specify neither, then a polynomial correction will be made
990        with built in coefficients known for certain telescopes (an error
991        will occur if the instrument is not known).
992        The data and Tsys are *divided* by the scaling factors.
993        Parameters:
994            poly:        Polynomial coefficients (default None) to compute a
995                         gain-elevation correction as a function of
996                         elevation (in degrees).
997            filename:    The name of an ascii file holding correction factors.
998                         The first row of the ascii file must give the column
999                         names and these MUST include columns
1000                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1001                         by this) somewhere.
1002                         The second row must give the data type of the
1003                         column. Use 'R' for Real and 'I' for Integer.
1004                         An example file would be
1005                         (actual factors are arbitrary) :
1006
1007                         TIME ELEVATION FACTOR
1008                         R R R
1009                         0.1 0 0.8
1010                         0.2 20 0.85
1011                         0.3 40 0.9
1012                         0.4 60 0.85
1013                         0.5 80 0.8
1014                         0.6 90 0.75
1015            method:      Interpolation method when correcting from a table.
1016                         Values are  "nearest", "linear" (default), "cubic"
1017                         and "spline"
1018            insitu:      if False a new scantable is returned.
1019                         Otherwise, the scaling is done in-situ
1020                         The default is taken from .asaprc (False)
1021        """
1022
1023        if insitu is None: insitu = rcParams['insitu']
1024        self._math._setinsitu(insitu)
1025        varlist = vars()
1026        if poly is None:
1027            poly = ()
1028        from os.path import expandvars
1029        filename = expandvars(filename)
1030        s = scantable(self._math._gainel(self, poly, filename, method))
1031        s._add_history("gain_el", varlist)
1032        print_log()
1033        if insitu: self._assign(s)
1034        else: return s
1035
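    # Illustrative usage of gain_el (comments only; 'gainel.txt' is a hypothetical
    # correction table in the format described above, and the polynomial
    # coefficients are placeholders):
    #     g = scan.gain_el(filename='gainel.txt', method='linear', insitu=False)
    #     g = scan.gain_el(poly=[0.5, 0.01, -1e-4], insitu=False)
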
1036    def freq_align(self, reftime=None, method='cubic', insitu=None):
1037        """
1038        Return a scan where all rows have been aligned in frequency/velocity.
1039        The alignment frequency frame (e.g. LSRK) is that set by function
1040        set_freqframe.
1041        Parameters:
1042            reftime:     reference time to align at. By default, the time of
1043                         the first row of data is used.
1044            method:      Interpolation method for regridding the spectra.
1045                         Choose from "nearest", "linear", "cubic" (default)
1046                         and "spline"
1047            insitu:      if False a new scantable is returned.
1048                         Otherwise, the scaling is done in-situ
1049                         The default is taken from .asaprc (False)
1050        """
1051        if insitu is None: insitu = rcParams["insitu"]
1052        self._math._setinsitu(insitu)
1053        varlist = vars()
1054        if reftime is None: reftime = ""
1055        s = scantable(self._math._freq_align(self, reftime, method))
1056        s._add_history("freq_align", varlist)
1057        print_log()
1058        if insitu: self._assign(s)
1059        else: return s
1060
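    # Illustrative usage of freq_align (comments only; the reference time string is
    # an arbitrary example following the time format used elsewhere in this module):
    #     aligned = scan.freq_align(insitu=False)
    #     aligned = scan.freq_align(reftime='2006/06/14/09:00:00', method='linear', insitu=False)
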
1061    def opacity(self, tau, insitu=None):
1062        """
1063        Apply an opacity correction. The data
1064        and Tsys are multiplied by the correction factor.
1065        Parameters:
1066            tau:         Opacity from which the correction factor is
1067                         exp(tau*ZD)
1068                         where ZD is the zenith-distance
1069            insitu:      if False a new scantable is returned.
1070                         Otherwise, the scaling is done in-situ
1071                         The default is taken from .asaprc (False)
1072        """
1073        if insitu is None: insitu = rcParams['insitu']
1074        self._math._setinsitu(insitu)
1075        varlist = vars()
1076        s = scantable(self._math._opacity(self, tau))
1077        s._add_history("opacity", varlist)
1078        print_log()
1079        if insitu: self._assign(s)
1080        else: return s
1081
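    # Illustrative usage of opacity (comments only; the zenith opacity is a placeholder):
    #     corrected = scan.opacity(tau=0.09, insitu=False)
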
1082    def bin(self, width=5, insitu=None):
1083        """
1084        Return a scan where all spectra have been binned up.
1085        Parameters:
1086            width:       The bin width (default=5) in pixels
1087            insitu:      if False a new scantable is returned.
1088                         Otherwise, the scaling is done in-situ
1089                         The default is taken from .asaprc (False)
1090        """
1091        if insitu is None: insitu = rcParams['insitu']
1092        self._math._setinsitu(insitu)
1093        varlist = vars()
1094        s = scantable(self._math._bin(self, width))
1095        s._add_history("bin", varlist)
1096        print_log()
1097        if insitu: self._assign(s)
1098        else: return s
1099
1100
1101    def resample(self, width=5, method='cubic', insitu=None):
1102        """
1103        Return a scan where all spectra have been resampled onto a new channel grid.
1104       
1105        Parameters:
1106            width:       The bin width (default=5) in pixels
1107            method:      Interpolation method used for the resampling.
1108                         Values are  "nearest", "linear", "cubic" (default)
1109                         and "spline"
1110            insitu:      if False a new scantable is returned.
1111                         Otherwise, the scaling is done in-situ
1112                         The default is taken from .asaprc (False)
1113        """
1114        if insitu is None: insitu = rcParams['insitu']
1115        self._math._setinsitu(insitu)
1116        varlist = vars()
1117        s = scantable(self._math._resample(self, method, width))
1118        s._add_history("resample", varlist)
1119        print_log()
1120        if insitu: self._assign(s)
1121        else: return s
1122
1123
1124    def average_pol(self, mask=None, weight='none'):
1125        """
1126        Average the Polarisations together.
1127        Parameters:
1128            mask:        An optional mask defining the region, where the
1129                         averaging will be applied. The output will have all
1130                         specified points masked.
1131            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1132                         weighted), or 'tsys' (1/Tsys**2 weighted)
1133        """
1134        varlist = vars()
1135        if mask is None:
1136            mask = ()
1137        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1138        s._add_history("average_pol", varlist)
1139        print_log()
1140        return s
1141
1142    def average_beam(self, mask=None, weight='none'):
1143        """
1144        Average the Beams together.
1145        Parameters:
1146            mask:        An optional mask defining the region, where the
1147                         averaging will be applied. The output will have all
1148                         specified points masked.
1149            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1150                         weighted), or 'tsys' (1/Tsys**2 weighted)
1151        """
1152        varlist = vars()
1153        if mask is None:
1154            mask = ()
1155        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1156        s._add_history("average_beam", varlist)
1157        print_log()
1158        return s
1159
1160    def convert_pol(self, poltype=None):
1161        """
1162        Convert the data to a different polarisation type.
1163        Parameters:
1164            poltype:    The new polarisation type. Valid types are:
1165                        "linear", "stokes" and "circular"
1166        """
1167        varlist = vars()
1168        try:
1169            s = scantable(self._math._convertpol(self, poltype))
1170        except RuntimeError, msg:
1171            if rcParams['verbose']:
1172                print msg
1173                return
1174            else:
1175                raise
1176        s._add_history("convert_pol", varlist)
1177        print_log()
1178        return s
1179
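    # Illustrative usage of convert_pol (comments only):
    #     stokes = scan.convert_pol('stokes')     # e.g. linears -> Stokes
    #     circ = scan.convert_pol('circular')
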
1180    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1181        """
1182        Smooth the spectrum by the specified kernel (conserving flux).
1183        Parameters:
1184            kernel:     The type of smoothing kernel. Select from
1185                        'hanning' (default), 'gaussian' and 'boxcar'.
1186                        The first three characters are sufficient.
1187            width:      The width of the kernel in pixels. For hanning this is
1188                        ignored, otherwise it defaults to 5 pixels.
1189                        For 'gaussian' it is the Full Width Half
1190                        Maximum. For 'boxcar' it is the full width.
1191            insitu:     if False a new scantable is returned.
1192                        Otherwise, the scaling is done in-situ
1193                        The default is taken from .asaprc (False)
1194        Example:
1195             none
1196        """
1197        if insitu is None: insitu = rcParams['insitu']
1198        self._math._setinsitu(insitu)
1199        varlist = vars()
1200        s = scantable(self._math._smooth(self, kernel.lower(), width))
1201        s._add_history("smooth", varlist)
1202        print_log()
1203        if insitu: self._assign(s)
1204        else: return s
1205
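    # Illustrative usage of smooth (comments only; widths are placeholders):
    #     sm = scan.smooth(kernel='hanning', insitu=False)               # width is ignored
    #     sm = scan.smooth(kernel='gaussian', width=7.0, insitu=False)   # FWHM in pixels
    #     sm = scan.smooth(kernel='boxcar', width=3.0, insitu=False)     # full width in pixels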
1206
1207    def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1208        """
1209        Return a scan which has been baselined (all rows) by a polynomial.
1210        Parameters:
1211            mask:       an optional mask
1212            order:      the order of the polynomial (default is 0)
1213            plot:       plot the fit and the residual. In this case each
1214                        individual fit has to be approved by typing 'y'
1215                        or 'n'
1216            insitu:     if False a new scantable is returned.
1217                        Otherwise, the scaling is done in-situ
1218                        The default is taken from .asaprc (False)
1219        Example:
1220            # return a scan baselined by a third order polynomial,
1221            # not using a mask
1222            bscan = scan.poly_baseline(order=3)
1223        """
1224        if insitu is None: insitu = rcParams['insitu']
1225        varlist = vars()
1226        if mask is None:
1227            mask = [True for i in xrange(self.nchan(-1))]
1228        from asap.asapfitter import fitter
1229        try:
1230            f = fitter()
1231            f.set_scan(self, mask)
1232            f.set_function(poly=order)
1233            s = f.auto_fit(insitu, plot=plot)
1234            s._add_history("poly_baseline", varlist)
1235            print_log()
1236            if insitu: self._assign(s)
1237            else: return s
1238        except RuntimeError:
1239            msg = "The fit failed, possibly because it didn't converge."
1240            if rcParams['verbose']:
1241                print msg
1242                return
1243            else:
1244                raise RuntimeError(msg)
1245
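    # Illustrative usage of poly_baseline with an explicit mask (comments only;
    # the channel windows are placeholders for line-free regions):
    #     msk = scan.create_mask([100, 400], [600, 900])
    #     bscan = scan.poly_baseline(mask=msk, order=2, insitu=False)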
1246
1247    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1248                           threshold=3, chan_avg_limit=1, plot=False,
1249                           insitu=None):
1250        """
1251        Return a scan which has been baselined (all rows) by a polynomial.
1252        Spectral lines are detected first using linefinder and masked out
1253        to avoid them affecting the baseline solution.
1254
1255        Parameters:
1256            mask:       an optional mask retrieved from scantable
1257            edge:       an optional number of channels to drop at
1258                        the edge of the spectrum. If only one value is
1259                        specified, the same number will be dropped from
1260                        both sides of the spectrum. Default is to keep
1261                        all channels. Nested tuples represent individual
1262                        edge selection for different IFs (the number of spectral
1263                        channels can differ)
1264            order:      the order of the polynomial (default is 0)
1265            threshold:  the threshold used by line finder. It is better to
1266                        keep it large as only strong lines affect the
1267                        baseline solution.
1268            chan_avg_limit:
1269                        a maximum number of consecutive spectral channels to
1270                        average during the search of weak and broad lines.
1271                        The default is no averaging (and no search for weak
1272                        lines). If such lines can affect the fitted baseline
1273                        (e.g. a high order polynomial is fitted), increase this
1274                        parameter (usually values up to 8 are reasonable). Most
1275                        users of this method should find the default value
1276                        sufficient.
1277            plot:       plot the fit and the residual. In this case each
1278                        individual fit has to be approved by typing 'y'
1279                        or 'n'
1280            insitu:     if False a new scantable is returned.
1281                        Otherwise, the scaling is done in-situ
1282                        The default is taken from .asaprc (False)
1283
1284        Example:
1285            scan2=scan.auto_poly_baseline(order=7)
1286        """
1287        if insitu is None: insitu = rcParams['insitu']
1288        varlist = vars()
1289        from asap.asapfitter import fitter
1290        from asap.asaplinefind import linefinder
1291        from asap import _is_sequence_or_number as _is_valid
1292
1293        # check whether edge is set up for each IF individually
1294        individualedge = False
1295        if len(edge) > 1:
1296            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1297                individualedge = True
1298
1299        if not _is_valid(edge, int) and not individualedge:
1300            raise ValueError, "Parameter 'edge' has to be an integer or a \
1301            pair of integers specified as a tuple. Nested tuples are allowed \
1302            to make individual selection for different IFs."
1303
1304        curedge = (0, 0)
1305        if individualedge:
1306            for edgepar in edge:
1307                if not _is_valid(edgepar, int):
1308                    raise ValueError, "Each element of the 'edge' tuple has \
1309                                       to be a pair of integers or an integer."
1310        else:
1311            curedge = edge
1312
1313        # setup fitter
1314        f = fitter()
1315        f.set_function(poly=order)
1316
1317        # setup line finder
1318        fl = linefinder()
1319        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1320
1321        if not insitu:
1322            workscan = self.copy()
1323        else:
1324            workscan = self
1325
1326        fl.set_scan(workscan)
1327
1328        rows = range(workscan.nrow())
1329        asaplog.push("Processing:")
1330        for r in rows:
1331            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1332                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1333                 workscan.getpol(r), workscan.getcycle(r))
1334            asaplog.push(msg, False)
1335
1336            # figure out edge parameter
1337            if individualedge:
1338                if len(edge) <= workscan.getif(r):
1339                    raise RuntimeError, "Number of edge elements appears to " \
1340                                        "be less than the number of IFs"
1341                curedge = edge[workscan.getif(r)]
1342
1343            # run the line finder and fit the baseline to the line-free channels
1344            fl.find_lines(r, mask, curedge)
1345            f.set_scan(workscan, fl.get_mask())
1346            f.x = workscan._getabcissa(r)
1347            f.y = workscan._getspectrum(r)
1348            f.data = None
1349            f.fit()
1350            x = f.get_parameters()
1351            if plot:
1352                f.plot(residual=True)
1353                x = raw_input("Accept fit ( [y]/n ): ")
1354                if x.upper() == 'N':
1355                    continue
1356            workscan._setspectrum(f.fitter.getresidual(), r)
1357        if plot:
1358            f._p.unmap()
1359            f._p = None
1360        workscan._add_history("auto_poly_baseline", varlist)
1361        if insitu:
1362            self._assign(workscan)
1363        else:
1364            return workscan
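    # Usage sketch (hypothetical values; 'scan' is an existing scantable with
    # two IFs and noisy band edges):
    #
    #     scan2 = scan.auto_poly_baseline(order=3, edge=[(50, 50), (80, 80)],
    #                                     threshold=5, insitu=False)
    #
    # Each inner pair drops that many channels from either end of the
    # corresponding IF before the line finder and polynomial fit are run.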
1365
1366    def rotate_linpolphase(self, angle):
1367        """
1368        Rotate the phase of the complex polarization O=Q+iU correlation.
1369        This is always done in situ in the raw data.  So if you call this
1370        function more than once then each call rotates the phase further.
1371        Parameters:
1372            angle:   The angle (degrees) to rotate (add) by.
1373        Examples:
1374            scan.rotate_linpolphase(2.3)
1375        """
1376        varlist = vars()
1377        self._math._rotate_linpolphase(self, angle)
1378        self._add_history("rotate_linpolphase", varlist)
1379        print_log()
1380        return
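    # Usage sketch (hypothetical angles): the rotation is applied in situ and
    # is cumulative, so successive calls add up:
    #
    #     scan.rotate_linpolphase(10.0)
    #     scan.rotate_linpolphase(5.0)    # total rotation is now 15 degrees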
1381
1382
1383    def rotate_xyphase(self, angle):
1384        """
1385        Rotate the phase of the XY correlation.  This is always done in situ
1386        in the data.  So if you call this function more than once
1387        then each call rotates the phase further.
1388        Parameters:
1389            angle:   The angle (degrees) to rotate (add) by.
1390        Examples:
1391            scan.rotate_xyphase(2.3)
1392        """
1393        varlist = vars()
1394        self._math._rotate_xyphase(self, angle)
1395        self._add_history("rotate_xyphase", varlist)
1396        print_log()
1397        return
1398
1399    def swap_linears(self):
1400        """
1401        Swap the linear polarisations XX and YY, or more generally the first
1402        two polarisations, as this also works for circulars.
1403        """
1404        varlist = vars()
1405        self._math._swap_linears(self)
1406        self._add_history("swap_linears", varlist)
1407        print_log()
1408        return
1409
1410    def invert_phase(self):
1411        """
1412        Invert the phase of the complex polarisation
1413        """
1414        varlist = vars()
1415        self._math._invert_phase(self)
1416        self._add_history("invert_phase", varlist)
1417        print_log()
1418        return
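    # Usage sketch for the polarisation helpers above (hypothetical sequence;
    # all of them operate in situ on the raw data):
    #
    #     scan.rotate_xyphase(-4.0)   # take out a known XY phase offset
    #     scan.swap_linears()         # if the first two polarisations were swapped
    #     scan.invert_phase()         # flip the sign of the complex polarisation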
1419
1420    def add(self, offset, insitu=None):
1421        """
1422        Return a scan where all spectra have the offset added
1423        Parameters:
1424            offset:      the offset
1425            insitu:      if False a new scantable is returned.
1426                         Otherwise, the addition is done in-situ
1427                         The default is taken from .asaprc (False)
1428        """
1429        if insitu is None: insitu = rcParams['insitu']
1430        self._math._setinsitu(insitu)
1431        varlist = vars()
1432        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1433        s._add_history("add", varlist)
1434        print_log()
1435        if insitu:
1436            self._assign(s)
1437        else:
1438            return s
1439
1440    def scale(self, factor, tsys=True, insitu=None):
1441        """
1442        Return a scan where all spectra are scaled by the given 'factor'
1443        Parameters:
1444            factor:      the scaling factor
1445            insitu:      if False a new scantable is returned.
1446                         Otherwise, the scaling is done in-situ
1447                         The default is taken from .asaprc (False)
1448            tsys:        if True (default) then apply the operation to Tsys
1449                         as well as the data
1450        """
1451        if insitu is None: insitu = rcParams['insitu']
1452        self._math._setinsitu(insitu)
1453        varlist = vars()
1454        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1455        s._add_history("scale", varlist)
1456        print_log()
1457        if insitu:
1458            self._assign(s)
1459        else:
1460            return s
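    # Usage sketch for add() and scale() (hypothetical values; with
    # insitu=False the original scantable is left untouched):
    #
    #     shifted = scan.add(1.25, insitu=False)              # constant offset
    #     scaled = scan.scale(2.0, tsys=True, insitu=False)   # scale data and Tsys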
1461
1462    def auto_quotient(self, preserve=True, mode='paired'):
1463        """
1464        This function allows you to build quotients automatically.
1465        It assumes the observation to have the same number of
1466        "ons" and "offs".
1467        Parameters:
1468            preserve:       you can preserve (default) the continuum or
1469                            remove it.  The equations used are
1470                            preserve: Output = Toff * (on/off) - Toff
1471                            remove:   Output = Toff * (on/off) - Ton
1472            mode:           the on/off detection mode
1473                            'paired' (default)
1474                            identifies 'off' scans by the
1475                            trailing '_R' (Mopra/Parkes) or
1476                            '_e'/'_w' (Tid) and matches
1477                            on/off pairs from the observing pattern
1478                            'time'
1479                            finds the closest off in time
1480
1481        """
1482        modes = ["time", "paired"]
1483        if not mode in modes:
1484            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1485            raise ValueError(msg)
1486        varlist = vars()
1487        s = None
1488        if mode.lower() == "paired":
1489            basesel = self.get_selection()
1490            sel = selector()+basesel
1491            sel.set_query("SRCTYPE==1")
1492            self.set_selection(sel)
1493            offs = self.copy()
1494            sel.set_query("SRCTYPE==0")
1495            self.set_selection(sel)
1496            ons = self.copy()
1497            s = scantable(self._math._quotient(ons, offs, preserve))
1498            self.set_selection(basesel)
1499        elif mode.lower() == "time":
1500            s = scantable(self._math._auto_quotient(self, mode, preserve))
1501        s._add_history("auto_quotient", varlist)
1502        print_log()
1503        return s
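    # Usage sketch (assumes a position-switched observation with matching
    # on/off scans; variable names are illustrative):
    #
    #     q = scan.auto_quotient(preserve=True, mode='paired')
    #     q2 = scan.auto_quotient(mode='time')   # pair each on with the nearest off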
1504
1505    def mx_quotient(self, mask = None, weight='median', preserve=True):
1506        """
1507        Form a quotient using "off" beams when observing in "MX" mode.
1508        Parameters:
1509            mask:           an optional mask to be used when weight == 'stddev'
1510            weight:         How to average the off beams.  Default is 'median'.
1511            preserve:       you can preserve (default) the continuum or
1512                            remove it.  The equations used are
1513                            preserve: Output = Toff * (on/off) - Toff
1514                            remove:   Output = Toff * (on/off) - Ton
1515        """
1516        if mask is None: mask = ()
1517        varlist = vars()
1518        on = scantable(self._math._mx_extract(self, 'on'))
1519        preoff = scantable(self._math._mx_extract(self, 'off'))
1520        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1521        from asapmath  import quotient
1522        q = quotient(on, off, preserve)
1523        q._add_history("mx_quotient", varlist)
1524        print_log()
1525        return q
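    # Usage sketch (hypothetical MX-mode data; the off beams are averaged with
    # the chosen weight before the quotient is formed):
    #
    #     q = scan.mx_quotient(weight='median', preserve=True)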
1526
1527    def freq_switch(self, insitu=None):
1528        """
1529        Apply frequency switching to the data.
1530        Parameters:
1531            insitu:      if False a new scantable is returned.
1532                         Otherwise, the switching is done in-situ
1533                         The default is taken from .asaprc (False)
1534        Example:
1535            none
1536        """
1537        if insitu is None: insitu = rcParams['insitu']
1538        self._math._setinsitu(insitu)
1539        varlist = vars()
1540        s = scantable(self._math._freqswitch(self))
1541        s._add_history("freq_switch", varlist)
1542        print_log()
1543        if insitu: self._assign(s)
1544        else: return s
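    # Usage sketch (hypothetical frequency-switched data):
    #
    #     fs = scan.freq_switch(insitu=False)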
1545
1546    def recalc_azel(self):
1547        """
1548        Recalculate the azimuth and elevation for each position.
1549        Parameters:
1550            none
1551        Example:
1552        """
1553        varlist = vars()
1554        self._recalcazel()
1555        self._add_history("recalc_azel", varlist)
1556        print_log()
1557        return
1558
1559    def __add__(self, other):
1560        varlist = vars()
1561        s = None
1562        if isinstance(other, scantable):
1563            s = scantable(self._math._binaryop(self, other, "ADD"))
1564        elif isinstance(other, float):
1565            s = scantable(self._math._unaryop(self, other, "ADD", False))
1566        else:
1567            raise TypeError("Other input is not a scantable or float value")
1568        s._add_history("operator +", varlist)
1569        print_log()
1570        return s
1571
1572    def __sub__(self, other):
1573        """
1574        implicit on all axes and on Tsys
1575        """
1576        varlist = vars()
1577        s = None
1578        if isinstance(other, scantable):
1579            s = scantable(self._math._binaryop(self, other, "SUB"))
1580        elif isinstance(other, float):
1581            s = scantable(self._math._unaryop(self, other, "SUB", False))
1582        else:
1583            raise TypeError("Other input is not a scantable or float value")
1584        s._add_history("operator -", varlist)
1585        print_log()
1586        return s
1587
1588    def __mul__(self, other):
1589        """
1590        implicit on all axes and on Tsys
1591        """
1592        varlist = vars()
1593        s = None
1594        if isinstance(other, scantable):
1595            s = scantable(self._math._binaryop(self, other, "MUL"))
1596        elif isinstance(other, float):
1597            s = scantable(self._math._unaryop(self, other, "MUL", False))
1598        else:
1599            raise TypeError("Other input is not a scantable or float value")
1600        s._add_history("operator *", varlist)
1601        print_log()
1602        return s
1603
1604
1605    def __div__(self, other):
1606        """
1607        implicit on all axes and on Tsys
1608        """
1609        varlist = vars()
1610        s = None
1611        if isinstance(other, scantable):
1612            s = scantable(self._math._binaryop(self, other, "DIV"))
1613        elif isinstance(other, float):
1614            if other == 0.0:
1615                raise ZeroDivisionError("Dividing by zero is not recommended")
1616            s = scantable(self._math._unaryop(self, other, "DIV", False))
1617        else:
1618            raise TypeError("Other input is not a scantable or float value")
1619        s._add_history("operator /", varlist)
1620        print_log()
1621        return s
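    # Usage sketch for the arithmetic operators above (hypothetical operands;
    # the right-hand side may be another scantable or a float):
    #
    #     diff = on_scan - off_scan   # scantable - scantable, on all axes and Tsys
    #     half = diff / 2.0           # scantable / float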
1622
1623    def get_fit(self, row=0):
1624        """
1625        Print or return the stored fits for a row in the scantable
1626        Parameters:
1627            row:    the row which the fit has been applied to.
1628        """
1629        if row >= self.nrow():
1630            return
1631        from asap.asapfit import asapfit
1632        fit = asapfit(self._getfit(row))
1633        if rcParams['verbose']:
1634            print fit
1635            return
1636        else:
1637            return fit.as_dict()
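    # Usage sketch (hypothetical row number; with rcParams['verbose'] the fit
    # is printed, otherwise it is returned as a dictionary):
    #
    #     fitinfo = scan.get_fit(row=0)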
1638
1639    def _add_history(self, funcname, parameters):
1640        # create date
1641        sep = "##"
1642        from datetime import datetime
1643        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1644        hist = dstr+sep
1645        hist += funcname+sep#cdate+sep
1646        if parameters.has_key('self'): del parameters['self']
1647        for k, v in parameters.iteritems():
1648            if type(v) is dict:
1649                for k2, v2 in v.iteritems():
1650                    hist += k2
1651                    hist += "="
1652                    if isinstance(v2, scantable):
1653                        hist += 'scantable'
1654                    elif k2 == 'mask':
1655                        if isinstance(v2, list) or isinstance(v2, tuple):
1656                            hist += str(self._zip_mask(v2))
1657                        else:
1658                            hist += str(v2)
1659                    else:
1660                        hist += str(v2)
1661            else:
1662                hist += k
1663                hist += "="
1664                if isinstance(v, scantable):
1665                    hist += 'scantable'
1666                elif k == 'mask':
1667                    if isinstance(v, list) or isinstance(v, tuple):
1668                        hist += str(self._zip_mask(v))
1669                    else:
1670                        hist += str(v)
1671                else:
1672                    hist += str(v)
1673            hist += sep
1674        hist = hist[:-2] # remove trailing '##'
1675        self._addhistory(hist)
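    # The history entry assembled above is a single '##'-separated string,
    # e.g. (illustrative date and parameters only):
    #
    #     2007/05/21 12:00:00##scale##factor=2.0##tsys=True##insitu=False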
1676
1677
1678    def _zip_mask(self, mask):
1679        mask = list(mask)
1680        i = 0
1681        segments = []
1682        while mask[i:].count(1):
1683            i += mask[i:].index(1)
1684            if mask[i:].count(0):
1685                j = i + mask[i:].index(0)
1686            else:
1687                j = len(mask)
1688            segments.append([i, j])
1689            i = j
1690        return segments
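    # _zip_mask compresses a boolean channel mask into [start, end) index
    # pairs, e.g. (illustrative input):
    #
    #     [0, 0, 1, 1, 1, 0, 1]  ->  [[2, 5], [6, 7]]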
1691
1692    def _get_ordinate_label(self):
1693        fu = "("+self.get_fluxunit()+")"
1694        import re
1695        lbl = "Intensity"
1696        if re.match(".K.", fu):
1697            lbl = "Brightness Temperature "+ fu
1698        elif re.match(".Jy.", fu):
1699            lbl = "Flux density "+ fu
1700        return lbl
1701
1702    def _check_ifs(self):
1703        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1704        nchans = filter(lambda t: t > 0, nchans)
1705        return (sum(nchans)/len(nchans) == nchans[0])
1706
1707    def _fill(self, names, unit, average):
1708        import os
1709        from asap._asap import stfiller
1710        first = True
1711        fullnames = []
1712        for name in names:
1713            name = os.path.expandvars(name)
1714            name = os.path.expanduser(name)
1715            if not os.path.exists(name):
1716                msg = "File '%s' does not exist" % (name)
1717                if rcParams['verbose']:
1718                    asaplog.push(msg)
1719                    print asaplog.pop().strip()
1720                    return
1721                raise IOError(msg)
1722            fullnames.append(name)
1723        if average:
1724            asaplog.push('Auto averaging integrations')
1725        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1726        for name in fullnames:
1727            tbl = Scantable(stype)
1728            r = stfiller(tbl)
1729            msg = "Importing %s..." % (name)
1730            asaplog.push(msg, False)
1731            print_log()
1732            r._open(name, -1, -1)
1733            r._read()
1734            #tbl = r._getdata()
1735            if average:
1736                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1737                #tbl = tbl2
1738            if not first:
1739                tbl = self._math._merge([self, tbl])
1740                #tbl = tbl2
1741            Scantable.__init__(self, tbl)
1742            r._close()
1743            del r, tbl
1744            first = False
1745        if unit is not None:
1746            self.set_fluxunit(unit)
1747        self.set_freqframe(rcParams['scantable.freqframe'])
1748