source: trunk/python/scantable.py @ 1360

Last change on this file since 1360 was 1360, checked in by mar637, 17 years ago

enhancement #107; added scantable.shift_refpix

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.3 KB
Line 
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:        brightness unit; must be consistent with K or Jy.
29                         Overrides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
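        Example (illustrative; the file name below is hypothetical):
            # read an rpfits file, averaging integrations within each scan
            s = scantable('mydata.rpf', average=True)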
32        """
33        if average is None:
34            average = rcParams['scantable.autoaverage']
35        varlist = vars()
36        from asap._asap import stmath
37        self._math = stmath()
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40        else:
41            if isinstance(filename, str):
42                import os.path
43                filename = os.path.expandvars(filename)
44                filename = os.path.expanduser(filename)
45                if not os.path.exists(filename):
46                    s = "File '%s' not found." % (filename)
47                    if rcParams['verbose']:
48                        asaplog.push(s)
49                        print asaplog.pop().strip()
50                        return
51                    raise IOError(s)
52                if os.path.isdir(filename) \
53                    and not os.path.exists(filename+'/table.f1'):
54                    # crude check if asap table
55                    if os.path.exists(filename+'/table.info'):
56                        ondisk = rcParams['scantable.storage'] == 'disk'
57                        Scantable.__init__(self, filename, ondisk)
58                        if unit is not None:
59                            self.set_fluxunit(unit)
60                        self.set_freqframe(rcParams['scantable.freqframe'])
61                    else:
62                        msg = "The given file '%s' is not a valid " \
63                              "asap table." % (filename)
64                        if rcParams['verbose']:
65                            print msg
66                            return
67                        else:
68                            raise IOError(msg)
69                else:
70                    self._fill([filename], unit, average)
71            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72                  and isinstance(filename[-1], str):
73                self._fill(filename, unit, average)
74        self._add_history("scantable", varlist)
75        print_log()
76
77    def save(self, name=None, format=None, overwrite=False):
78        """
79        Store the scantable on disk. This can be an asap (aips++) Table,
80        SDFITS or MS2 format.
81        Parameters:
82            name:        the name of the outputfile. For format "ASCII"
83                         this is the root file name (data in 'name'.txt
84                         and header in 'name'_header.txt)
85            format:      an optional file format. Default is ASAP.
86                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
87                                       'SDFITS' (save as SDFITS file)
88                                       'ASCII' (saves as ascii text file)
89                                       'MS2' (saves as an aips++
90                                              MeasurementSet V2)
91            overwrite:   Whether the file should be overwritten if it exists.
92                         The default (False) is to return with a warning
93                         without writing the output. USE WITH CARE.
94        Example:
95            scan.save('myscan.asap')
96            scan.save('myscan.sdfits', 'SDFITS')
97        """
98        from os import path
99        if format is None: format = rcParams['scantable.save']
100        suffix = '.'+format.lower()
101        if name is None or name == "":
102            name = 'scantable'+suffix
103            msg = "No filename given. Using default name %s..." % name
104            asaplog.push(msg)
105        name = path.expandvars(name)
106        if path.isfile(name) or path.isdir(name):
107            if not overwrite:
108                msg = "File %s exists." % name
109                if rcParams['verbose']:
110                    print msg
111                    return
112                else:
113                    raise IOError(msg)
114        format2 = format.upper()
115        if format2 == 'ASAP':
116            self._save(name)
117        else:
118            from asap._asap import stwriter as stw
119            writer = stw(format2)
120            writer.write(self, name)
121        print_log()
122        return
123
124    def copy(self):
125        """
126        Return a copy of this scantable.
127        Note:
128            This makes a full (deep) copy. scan2 = scan1 makes a reference.
129        Parameters:
130            none
131        Example:
132            copiedscan = scan.copy()
133        """
134        sd = scantable(Scantable._copy(self))
135        return sd
136
137    def drop_scan(self, scanid=None):
138        """
139        Return a new scantable where the specified scan number(s) have
140        been dropped.
141        Parameters:
142            scanid:    a (list of) scan number(s)
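        Example (illustrative; assumes 'scan' is an existing scantable):
            # drop scan numbers 2 and 4
            cleaned = scan.drop_scan([2, 4])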
143        """
144        from asap import _is_sequence_or_number as _is_valid
145        from asap import _to_list
146        from asap import unique
147        if not _is_valid(scanid):
148            if rcParams['verbose']:
149                print "Please specify a scanno to drop from the scantable"
150                return
151            else:
152                raise RuntimeError("No scan given")
153        try:
154            scanid = _to_list(scanid)
155            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
156            for sid in scanid: allscans.remove(sid)
157            if len(allscans) == 0:
158                raise ValueError("Can't remove all scans")
159        except ValueError:
160            if rcParams['verbose']:
161                print "Couldn't find any match."
162                return
163            else: raise
164        try:
165            bsel = self.get_selection()
166            sel = selector()
167            sel.set_scans(allscans)
168            self.set_selection(bsel+sel)
169            scopy = self._copy()
170            self.set_selection(bsel)
171            return scantable(scopy)
172        except RuntimeError:
173            if rcParams['verbose']:
174                print "Couldn't find any match."
175            else:
176                raise
177
178
179    def get_scan(self, scanid=None):
180        """
181        Return a specific scan (by scanno) or collection of scans (by
182        source name) in a new scantable.
183        Note:
184            See scantable.drop_scan() for the inverse operation.
185        Parameters:
186            scanid:    a (list of) scanno or a source name, unix-style
187                       patterns are accepted for source name matching, e.g.
188                       '*_R' gets all 'ref' scans
189        Example:
190            # get all scans containing the source '323p459'
191            newscan = scan.get_scan('323p459')
192            # get all 'off' scans
193            refscans = scan.get_scan('*_R')
194            # get a subset of scans by scanno (as listed in scan.summary())
195            newscan = scan.get_scan([0, 2, 7, 10])
196        """
197        if scanid is None:
198            if rcParams['verbose']:
199                print "Please specify a scan no or name to " \
200                      "retrieve from the scantable"
201                return
202            else:
203                raise RuntimeError("No scan given")
204
205        try:
206            bsel = self.get_selection()
207            sel = selector()
208            if type(scanid) is str:
209                sel.set_name(scanid)
210                self.set_selection(bsel+sel)
211                scopy = self._copy()
212                self.set_selection(bsel)
213                return scantable(scopy)
214            elif type(scanid) is int:
215                sel.set_scans([scanid])
216                self.set_selection(bsel+sel)
217                scopy = self._copy()
218                self.set_selection(bsel)
219                return scantable(scopy)
220            elif type(scanid) is list:
221                sel.set_scans(scanid)
222                self.set_selection(bsel+sel)
223                scopy = self._copy()
224                self.set_selection(bsel)
225                return scantable(scopy)
226            else:
227                msg = "Illegal scanid type, use 'int' or 'list' of ints."
228                if rcParams['verbose']:
229                    print msg
230                else:
231                    raise TypeError(msg)
232        except RuntimeError:
233            if rcParams['verbose']: print "Couldn't find any match."
234            else: raise
235
236    def __str__(self):
237        return Scantable._summary(self, True)
238
239    def summary(self, filename=None):
240        """
241        Print a summary of the contents of this scantable.
242        Parameters:
243            filename:    the name of a file to write the output to
244                         Default - no file output
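        Example (illustrative; the output file name is arbitrary):
            scan.summary()                        # print/page the summary
            scan.summary(filename='summary.txt')  # write it to a file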
247        """
248        info = Scantable._summary(self, True)
249        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
250        if filename is not None:
251            if filename == "":
252                filename = 'scantable_summary.txt'
253            from os.path import expandvars, isdir
254            filename = expandvars(filename)
255            if not isdir(filename):
256                data = open(filename, 'w')
257                data.write(info)
258                data.close()
259            else:
260                msg = "Illegal file name '%s'." % (filename)
261                if rcParams['verbose']:
262                    print msg
263                else:
264                    raise IOError(msg)
265        if rcParams['verbose']:
266            try:
267                from IPython.genutils import page as pager
268            except ImportError:
269                from pydoc import pager
270            pager(info)
271        else:
272            return info
273
274
275    def get_selection(self):
276        """
277        Get the selection object currently set on this scantable.
278        Parameters:
279            none
280        Example:
281            sel = scan.get_selection()
282            sel.set_ifs(0)              # select IF 0
283            scan.set_selection(sel)     # apply modified selection
284        """
285        return selector(self._getselection())
286
287    def set_selection(self, selection=selector()):
288        """
289        Select a subset of the data. All following operations on this scantable
290        are only applied to this selection.
291        Parameters:
292            selection:    a selector object (default unset the selection)
293        Examples:
294            sel = selector()         # create a selection object
295            sel.set_scans([0, 3])    # select SCANNO 0 and 3
296            scan.set_selection(sel)  # set the selection
297            scan.summary()           # will only print summary of scanno 0 and 3
298            scan.set_selection()     # unset the selection
299        """
300        self._setselection(selection)
301
302    def stats(self, stat='stddev', mask=None):
303        """
304        Determine the specified statistic of the current beam/if/pol
305        Takes a 'mask' as an optional parameter to specify which
306        channels should be excluded.
307        Parameters:
308            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
309                     'var', 'stddev', 'avdev', 'rms', 'median'
310            mask:    an optional mask specifying where the statistic
311                     should be determined.
312        Example:
313            scan.set_unit('channel')
314            msk = scan.create_mask([100, 200], [500, 600])
315            scan.stats(stat='mean', mask=msk)
316        """
317        if mask is None:
318            mask = []
319        axes = ['Beam', 'IF', 'Pol', 'Time']
320        if not self._check_ifs():
321            raise ValueError("Cannot apply mask as the IFs have different "
322                             "number of channels. Please use set_selection() "
323                             "to select individual IFs")
324
325        statvals = self._math._stats(self, mask, stat)
326        out = ''
327        axes = []
328        for i in range(self.nrow()):
329            axis = []
330            axis.append(self.getscan(i))
331            axis.append(self.getbeam(i))
332            axis.append(self.getif(i))
333            axis.append(self.getpol(i))
334            axis.append(self.getcycle(i))
335            axes.append(axis)
336            tm = self._gettime(i)
337            src = self._getsourcename(i)
338            out += 'Scan[%d] (%s) ' % (axis[0], src)
339            out += 'Time[%s]:\n' % (tm)
340            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
341            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
342            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
343            out += '= %3.3f\n' % (statvals[i])
344            out +=  "--------------------------------------------------\n"
345
346        if rcParams['verbose']:
347            print "--------------------------------------------------"
348            print " ", stat
349            print "--------------------------------------------------"
350            print out
351        #else:
352            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
353            #           'axes' : axes,
354            #           'data': statvals}
355        return statvals
356
357    def stddev(self, mask=None):
358        """
359        Determine the standard deviation of the current beam/if/pol
360        Takes a 'mask' as an optional parameter to specify which
361        channels should be excluded.
362        Parameters:
363            mask:    an optional mask specifying where the standard
364                     deviation should be determined.
365
366        Example:
367            scan.set_unit('channel')
368            msk = scan.create_mask([100, 200], [500, 600])
369            scan.stddev(mask=msk)
370        """
371        return self.stats(stat='stddev', mask=mask)
372
373
374    def get_column_names(self):
375        """
376        Return a  list of column names, which can be used for selection.
377        """
378        return list(Scantable.get_column_names(self))
379
380    def get_tsys(self):
381        """
382        Return the System temperatures.
383        Returns:
384            a list of Tsys values for the current selection
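        Example (illustrative; assumes 'scan' is an existing scantable):
            tsys = scan.get_tsys()    # one Tsys value per row of the selection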
385        """
386
387        return self._row_callback(self._gettsys, "Tsys")
388
389    def _row_callback(self, callback, label):
390        axes = []
391        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
392        out = ""
393        outvec = []
394        for i in range(self.nrow()):
395            axis = []
396            axis.append(self.getscan(i))
397            axis.append(self.getbeam(i))
398            axis.append(self.getif(i))
399            axis.append(self.getpol(i))
400            axis.append(self.getcycle(i))
401            axes.append(axis)
402            tm = self._gettime(i)
403            src = self._getsourcename(i)
404            out += 'Scan[%d] (%s) ' % (axis[0], src)
405            out += 'Time[%s]:\n' % (tm)
406            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
407            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
408            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
409            outvec.append(callback(i))
410            out += '= %3.3f\n' % (outvec[i])
411            out +=  "--------------------------------------------------\n"
412        if rcParams['verbose']:
413            print "--------------------------------------------------"
414            print " %s" % (label)
415            print "--------------------------------------------------"
416            print out
417        # disabled because the vector seems more useful
418        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
419        return outvec
420
421    def _get_column(self, callback, row=-1):
422        """
423        """
424        if row == -1:
425            return [callback(i) for i in range(self.nrow())]
426        else:
427            if  0 <= row < self.nrow():
428                return callback(row)
429
430
431    def get_time(self, row=-1, asdatetime=False):
432        """
433        Get a list of time stamps for the observations.
434        Return a datetime object for each integration time stamp in the scantable.
435        Parameters:
436            row:          row no of integration. Default -1 return all rows
437            asdatetime:   return values as datetime objects rather than strings
438        Example:
439            none
440        """
441        from time import strptime
442        from datetime import datetime
443        if not asdatetime:
444            return self._get_column(self._gettime, row)
445        format = "%Y/%m/%d/%H:%M:%S"
        # fetch the time strings before converting them to datetime objects
        times = self._get_column(self._gettime, row)
446        if isinstance(times, list):
447            return [datetime(*strptime(i, format)[:6]) for i in times]
448        else:
449            return datetime(*strptime(times, format)[:6])
450
451
452    def get_inttime(self, row=-1):
453        """
454        Get a list of integration times for the observations.
455        Return a time in seconds for each integration in the scantable.
456        Parameters:
457            row:    row no of integration. Default -1 return all rows.
458        Example:
459            none
460        """
461        return self._get_column(self._getinttime, row)       
462       
463
464    def get_sourcename(self, row=-1):
465        """
466        Get a list of source names for the observations.
467        Return a string for each integration in the scantable.
468        Parameters:
469            row:    row no of integration. Default -1 return all rows.
470        Example:
471            none
472        """
473        return self._get_column(self._getsourcename, row)
474
475    def get_elevation(self, row=-1):
476        """
477        Get a list of elevations for the observations.
478        Return a float for each integration in the scantable.
479        Parameters:
480            row:    row no of integration. Default -1 return all rows.
481        Example:
482            none
483        """
484        return self._get_column(self._getelevation, row)
485
486    def get_azimuth(self, row=-1):
487        """
488        Get a list of azimuths for the observations.
489        Return a float for each integration in the scantable.
490        Parameters:
491            row:    row no of integration. Default -1 return all rows.
492        Example:
493            none
494        """
495        return self._get_column(self._getazimuth, row)
496
497    def get_parangle(self, row=-1):
498        """
499        Get a list of parallactic angles for the observations.
500        Return a float for each integration in the scantable.
501        Parameters:
502            row:    row no of integration. Default -1 return all rows.
503        Example:
504            none
505        """
506        return self._get_column(self._getparangle, row)
507
508    def get_direction(self, row=-1):
509        """
510        Get a list of Positions on the sky (direction) for the observations.
511        Return a float for each integration in the scantable.
512        Parameters:
513            row:    row no of integration. Default -1 return all rows
514        Example:
515            none
516        """
517        return self._get_column(self._getdirection, row)
518
519    def set_unit(self, unit='channel'):
520        """
521        Set the unit for all following operations on this scantable
522        Parameters:
523            unit:    optional unit, default is 'channel'
524                     one of '*Hz', 'km/s', 'channel', ''
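        Example (illustrative):
            scan.set_unit('km/s')     # work in velocity from now on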
525        """
526        varlist = vars()
527        if unit in ['', 'pixel', 'channel']:
528            unit = ''
529        inf = list(self._getcoordinfo())
530        inf[0] = unit
531        self._setcoordinfo(inf)
532        self._add_history("set_unit", varlist)
533
534    def set_instrument(self, instr):
535        """
536        Set the instrument for subsequent processing.
537        Parameters:
538            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
539                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
540        """
541        self._setInstrument(instr)
542        self._add_history("set_instrument", vars())
543        print_log()
544
545    def set_feedtype(self, feedtype):
546        """
547        Overwrite the feed type, which might not be set correctly.
548        Parameters:
549            feedtype:     'linear' or 'circular'
550        """
551        self._setfeedtype(feedtype)
552        self._add_history("set_feedtype", vars())
553        print_log()
554
555    def set_doppler(self, doppler='RADIO'):
556        """
557        Set the doppler for all following operations on this scantable.
558        Parameters:
559            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
560        """
561        varlist = vars()
562        inf = list(self._getcoordinfo())
563        inf[2] = doppler
564        self._setcoordinfo(inf)
565        self._add_history("set_doppler", vars())
566        print_log()
567
568    def set_freqframe(self, frame=None):
569        """
570        Set the frame type of the Spectral Axis.
571        Parameters:
572            frame:   an optional frame type, default 'LSRK'. Valid frames are:
573                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
574                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
575        Examples:
576            scan.set_freqframe('BARY')
577        """
578        if frame is None: frame = rcParams['scantable.freqframe']
579        varlist = vars()
580        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
581                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
582
583        if frame in valid:
584            inf = list(self._getcoordinfo())
585            inf[1] = frame
586            self._setcoordinfo(inf)
587            self._add_history("set_freqframe", varlist)
588        else:
589            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
590            if rcParams['verbose']:
591                print msg
592            else:
593                raise TypeError(msg)
594        print_log()
595
596    def set_dirframe(self, frame=""):
597        """
598        Set the frame type of the Direction on the sky.
599        Parameters:
600            frame:   an optional frame type, default ''. Valid frames are:
601                     'J2000', 'B1950', 'GALACTIC'
602        Examples:
603            scan.set_dirframe('GALACTIC')
604        """
605        varlist = vars()
606        try:
607            Scantable.set_dirframe(self, frame)
608        except RuntimeError, msg:
609            if rcParams['verbose']:
610                print msg
611            else:
612                raise
613        self._add_history("set_dirframe", varlist)
614
615    def get_unit(self):
616        """
617        Get the default unit set in this scantable
618        Returns:
619            A unit string
620        """
621        inf = self._getcoordinfo()
622        unit = inf[0]
623        if unit == '': unit = 'channel'
624        return unit
625
626    def get_abcissa(self, rowno=0):
627        """
628        Get the abcissa in the current coordinate setup for the currently
629        selected Beam/IF/Pol
630        Parameters:
631            rowno:    an optional row number in the scantable. Default is the
632                      first row, i.e. rowno=0
633        Returns:
634            The abcissa values and the format string (as a dictionary)
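        Example (illustrative; assumes 'scan' is an existing scantable):
            abc, lbl = scan.get_abcissa(rowno=0)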
635        """
636        abc = self._getabcissa(rowno)
637        lbl = self._getabcissalabel(rowno)
638        print_log()
639        return abc, lbl
640
641    def flag(self, mask=None):
642        """
643        Flag the selected data using an optional channel mask.
644        Parameters:
645            mask:   an optional channel mask, created with create_mask. Default
646                    (no mask) is all channels.
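        Example (illustrative; the channel ranges are arbitrary):
            msk = scan.create_mask([0, 100], [900, 1023])
            scan.flag(msk)            # flag using the channel mask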
647        """
648        varlist = vars()
649        if mask is None:
650            mask = []
651        try:
652            self._flag(mask)
653        except RuntimeError, msg:
654            if rcParams['verbose']:
655                print msg
656                return
657            else: raise
658        self._add_history("flag", varlist)
659
660    def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
661        """
662        Flag the data in 'lag' space by providing a frequency to remove.
663        Flagged data in the scantable gets set to 0.0 before the fft.
664        No taper is applied.
665        Parameters:
666            frequency:    the frequency (really a period within the bandwidth)
667                          to remove
668            width:        the width of the frequency to remove, to remove a
669                          range of frequencies around the centre.
670            unit:         the frequency unit (default "GHz")
671        Notes:
672            It is recommended to flag edges of the band or strong
673            signals beforehand.
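        Example (illustrative; the frequency and width values are arbitrary):
            cleaned = scan.lag_flag(frequency=5.5, width=0.1, unit="MHz",
                                    insitu=False)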
674        """
675        if insitu is None: insitu = rcParams['insitu']
676        self._math._setinsitu(insitu)
677        varlist = vars()
678        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
679        if not base.has_key(unit):
680            raise ValueError("%s is not a valid unit." % unit)
681        try:
682            s = scantable(self._math._lag_flag(self, frequency*base[unit],
683                                               width*base[unit]))
684        except RuntimeError, msg:
685            if rcParams['verbose']:
686                print msg
687                return
688            else: raise
689        s._add_history("lag_flag", varlist)
690        print_log()
691        if insitu:
692            self._assign(s)
693        else:
694            return s
695
696
697    def create_mask(self, *args, **kwargs):
698        """
699        Compute and return a mask based on [min, max] windows.
700        The specified windows are to be INCLUDED, when the mask is
701        applied.
702        Parameters:
703            [min, max], [min2, max2], ...
704                Pairs of start/end points (inclusive) specifying the regions
705                to be masked
706            invert:     optional argument. If specified as True,
707                        return an inverted mask, i.e. the regions
708                        specified are EXCLUDED
709            row:        create the mask using the specified row for
710                        unit conversions, default is row=0
711                        only necessary if frequency varies over rows.
712        Example:
713            scan.set_unit('channel')
714            a)
715            msk = scan.create_mask([400, 500], [800, 900])
716            # masks everything outside 400 and 500
717            # and 800 and 900 in the unit 'channel'
718
719            b)
720            msk = scan.create_mask([400, 500], [800, 900], invert=True)
721            # masks the regions between 400 and 500
722            # and 800 and 900 in the unit 'channel'
723            c)
724            # mask only channel 400
725            msk = scan.create_mask([400, 400])
726        """
727        row = 0
728        if kwargs.has_key("row"):
729            row = kwargs.get("row")
730        data = self._getabcissa(row)
731        u = self._getcoordinfo()[0]
732        if rcParams['verbose']:
733            if u == "": u = "channel"
734            msg = "The current mask window unit is %s" % u
735            i = self._check_ifs()
736            if not i:
737                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
738            asaplog.push(msg)
739        n = self.nchan()
740        msk = _n_bools(n, False)
741        # test if args is a 'list' or a 'normal *args - UGLY!!!
742
743        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
744             and args or args[0]
745        for window in ws:
746            if (len(window) != 2 or window[0] > window[1] ):
747                raise TypeError("A window needs to be defined as [min, max]")
748            for i in range(n):
749                if data[i] >= window[0] and data[i] <= window[1]:
750                    msk[i] = True
751        if kwargs.has_key('invert'):
752            if kwargs.get('invert'):
753                msk = mask_not(msk)
754        print_log()
755        return msk
756
757    def get_restfreqs(self):
758        """
759        Get the restfrequency(s) stored in this scantable.
760        The return value(s) are always of unit 'Hz'
761        Parameters:
762            none
763        Returns:
764            a list of doubles
765        """
766        return list(self._getrestfreqs())
767
768
769    def set_restfreqs(self, freqs=None, unit='Hz'):
770        """
771        Set or replace the rest frequency(s).
772        If the 'freqs' argument holds a scalar,
773        then that rest frequency will be applied to all the selected
774        data.  If the 'freqs' argument holds
775        a vector, then it MUST be of equal or smaller length than
776        the number of IFs (and the available rest frequencies will be
777        replaced by this vector).  In this case, *all* data have
778        the rest frequency set per IF according
779        to the corresponding value you give in the 'freqs' vector.
780        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
781        IF 1 gets restfreq 2e9.
782        You can also specify the frequencies via a linecatalog.
783
784        Parameters:
785            freqs:   list of rest frequency values or string identifiers
786            unit:    unit for rest frequency (default 'Hz')
787
788        Example:
789            # set the given restfrequency for the whole table
790            scan.set_restfreqs(freqs=1.4e9)
791            # If the number of IFs in the data is >= 2, IF0 gets the first
792            # value IF1 the second...
793            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
794            #set the given restfrequency for the whole table (by name)
795            scan.set_restfreqs(freqs="OH1667")
796
797        Note:
798            To do more sophisticated rest frequency setting, e.g. on a
799            source and IF basis, use scantable.set_selection() before using
800            this function.
801            # provided your scantable is called scan
802            selection = selector()
803            selection.set_name("ORION*")
804            selection.set_ifs([1])
805            scan.set_selection(selection)
806            scan.set_restfreqs(freqs=86.6e9)
807
808        """
809        varlist = vars()
810        from asap import linecatalog
811        # simple  value
812        if isinstance(freqs, int) or isinstance(freqs, float):
813            self._setrestfreqs(freqs, "",unit)
814        # list of values
815        elif isinstance(freqs, list) or isinstance(freqs, tuple):
816            # list values are scalars
817            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
818                sel = selector()
819                savesel = self._getselection()
820                iflist = self.getifnos()
821                for i in xrange(len(freqs)):
822                    sel.set_ifs(iflist[i])
823                    self._setselection(sel)
824                    self._setrestfreqs(freqs[i], "",unit)
825                self._setselection(savesel)
826            # list values are dicts, e.g. {'value': freq, 'name': name}
827            elif isinstance(freqs[-1], dict):
828                sel = selector()
829                savesel = self._getselection()
830                iflist = self.getifnos()
831                for i in xrange(len(freqs)):
832                    sel.set_ifs(iflist[i])
833                    self._setselection(sel)
834                    self._setrestfreqs(freqs[i]["value"],
835                                       freqs[i]["name"], "MHz")
836                self._setselection(savesel)
837        # freqs are to be taken from a linecatalog
838        elif isinstance(freqs, linecatalog):
839            sel = selector()
840            savesel = self._getselection()
841            iflist = self.getifnos()
842            for i in xrange(freqs.nrow()):
843                sel.set_ifs(iflist[i])
844                self._setselection(sel)
845                self._setrestfreqs(freqs.get_frequency(i),
846                                   freqs.get_name(i), "MHz")
847                # ensure that we are not iterating past nIF
848                if i == self.nif()-1: break
849            self._setselection(savesel)
850        else:
851            return
852        self._add_history("set_restfreqs", varlist)
853
854    def shift_refpix(self, delta):
855        """
856        Shift the reference pixel of the Spectral Coordinate by an
857        integer amount.
858        Parameters:
859            delta:   the amount to shift by
860        Note:
861            Be careful using this with broadband data.
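        Example (illustrative):
            scan.shift_refpix(4)      # shift the reference pixel by 4 channels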
862        """
863        Scantable.shift(self, delta)
864
865    def history(self, filename=None):
866        """
867        Print the history. Optionally to a file.
868        Parameters:
869            filename:    The name of the file to save the history to.
870        """
871        hist = list(self._gethistory())
872        out = "-"*80
873        for h in hist:
874            if h.startswith("---"):
875                out += "\n"+h
876            else:
877                items = h.split("##")
878                date = items[0]
879                func = items[1]
880                items = items[2:]
881                out += "\n"+date+"\n"
882                out += "Function: %s\n  Parameters:" % (func)
883                for i in items:
884                    s = i.split("=")
885                    out += "\n   %s = %s" % (s[0], s[1])
886                out += "\n"+"-"*80
887        if filename is not None:
888            if filename == "":
889                filename = 'scantable_history.txt'
890            import os
891            filename = os.path.expandvars(os.path.expanduser(filename))
892            if not os.path.isdir(filename):
893                data = open(filename, 'w')
894                data.write(out)
895                data.close()
896            else:
897                msg = "Illegal file name '%s'." % (filename)
898                if rcParams['verbose']:
899                    print msg
900                else:
901                    raise IOError(msg)
902        if rcParams['verbose']:
903            try:
904                from IPython.genutils import page as pager
905            except ImportError:
906                from pydoc import pager
907            pager(out)
908        else:
909            return out
910        return
911    #
912    # Maths business
913    #
914
915    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
916        """
917        Return the (time) weighted average of a scan.
918        Note:
919            in channels only - align if necessary
920        Parameters:
921            mask:     an optional mask (only used for 'var' and 'tsys'
922                      weighting)
923            scanav:   True averages each scan separately
924                      False (default) averages all scans together,
925            weight:   Weighting scheme.
926                      'none'     (plain mean, no weighting)
927                      'var'      (1/var(spec) weighted)
928                      'tsys'     (1/Tsys**2 weighted)
929                      'tint'     (integration time weighted)
930                      'tintsys'  (Tint/Tsys**2)
931                      'median'   (median averaging)
932                      The default is 'tint'
933            align:    align the spectra in velocity before averaging. It takes
934                      the time of the first spectrum as reference time.
935        Example:
936            # time average the scantable without using a mask
937            newscan = scan.average_time()
938        """
939        varlist = vars()
940        if weight is None: weight = 'TINT'
941        if mask is None: mask = ()
942        if scanav: scanav = "SCAN"
943        else: scanav = "NONE"
944        scan = (self, )
945        try:
946            if align:
947                scan = (self.freq_align(insitu=False), )
948            s = None
949            if weight.upper() == 'MEDIAN':
950                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
951                                                         scanav))
952            else:
953                s = scantable(self._math._average(scan, mask, weight.upper(),
954                              scanav))
955        except RuntimeError, msg:
956            if rcParams['verbose']:
957                print msg
958                return
959            else: raise
960        s._add_history("average_time", varlist)
961        print_log()
962        return s
963
964    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
965        """
966        Return a scan where all spectra are converted to either
967        Jansky or Kelvin depending upon the flux units of the scan table.
968        By default the function tries to look the values up internally.
969        If it can't find them (or if you want to override them), you must
970        specify EITHER jyperk OR eta (and d, which it will also try to look
971        up if you don't set it). jyperk takes precedence if you set both.
972        Parameters:
973            jyperk:      the Jy / K conversion factor
974            eta:         the aperture efficiency
975            d:           the geometric diameter (metres)
976            insitu:      if False a new scantable is returned.
977                         Otherwise, the scaling is done in-situ
978                         The default is taken from .asaprc (False)
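        Example (illustrative; the Jy/K factor is arbitrary):
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)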
979        """
980        if insitu is None: insitu = rcParams['insitu']
981        self._math._setinsitu(insitu)
982        varlist = vars()
983        if jyperk is None: jyperk = -1.0
984        if d is None: d = -1.0
985        if eta is None: eta = -1.0
986        s = scantable(self._math._convertflux(self, d, eta, jyperk))
987        s._add_history("convert_flux", varlist)
988        print_log()
989        if insitu: self._assign(s)
990        else: return s
991
992    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
993        """
994        Return a scan after applying a gain-elevation correction.
995        The correction can be made via either a polynomial or a
996        table-based interpolation (and extrapolation if necessary).
997        You specify polynomial coefficients, an ascii table or neither.
998        If you specify neither, then a polynomial correction will be made
999        with built in coefficients known for certain telescopes (an error
1000        will occur if the instrument is not known).
1001        The data and Tsys are *divided* by the scaling factors.
1002        Parameters:
1003            poly:        Polynomial coefficients (default None) to compute a
1004                         gain-elevation correction as a function of
1005                         elevation (in degrees).
1006            filename:    The name of an ascii file holding correction factors.
1007                         The first row of the ascii file must give the column
1008                         names and these MUST include columns
1009                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1010                         by this) somewhere.
1011                         The second row must give the data type of the
1012                         column. Use 'R' for Real and 'I' for Integer.
1013                         An example file would be
1014                         (actual factors are arbitrary) :
1015
1016                         TIME ELEVATION FACTOR
1017                         R R R
1018                         0.1 0 0.8
1019                         0.2 20 0.85
1020                         0.3 40 0.9
1021                         0.4 60 0.85
1022                         0.5 80 0.8
1023                         0.6 90 0.75
1024            method:      Interpolation method when correcting from a table.
1025                         Values are  "nearest", "linear" (default), "cubic"
1026                         and "spline"
1027            insitu:      if False a new scantable is returned.
1028                         Otherwise, the scaling is done in-situ
1029                         The default is taken from .asaprc (False)
1030        """
1031
1032        if insitu is None: insitu = rcParams['insitu']
1033        self._math._setinsitu(insitu)
1034        varlist = vars()
1035        if poly is None:
1036            poly = ()
1037        from os.path import expandvars
1038        filename = expandvars(filename)
1039        s = scantable(self._math._gainel(self, poly, filename, method))
1040        s._add_history("gain_el", varlist)
1041        print_log()
1042        if insitu: self._assign(s)
1043        else: return s
1044
1045    def freq_align(self, reftime=None, method='cubic', insitu=None):
1046        """
1047        Return a scan where all rows have been aligned in frequency/velocity.
1048        The alignment frequency frame (e.g. LSRK) is that set by function
1049        set_freqframe.
1050        Parameters:
1051            reftime:     reference time to align at. By default, the time of
1052                         the first row of data is used.
1053            method:      Interpolation method for regridding the spectra.
1054                         Choose from "nearest", "linear", "cubic" (default)
1055                         and "spline"
1056            insitu:      if False a new scantable is returned.
1057                         Otherwise, the scaling is done in-situ
1058                         The default is taken from .asaprc (False)
1059        """
1060        if insitu is None: insitu = rcParams["insitu"]
1061        self._math._setinsitu(insitu)
1062        varlist = vars()
1063        if reftime is None: reftime = ""
1064        s = scantable(self._math._freq_align(self, reftime, method))
1065        s._add_history("freq_align", varlist)
1066        print_log()
1067        if insitu: self._assign(s)
1068        else: return s
1069
1070    def opacity(self, tau, insitu=None):
1071        """
1072        Apply an opacity correction. The data
1073        and Tsys are multiplied by the correction factor.
1074        Parameters:
1075            tau:         Opacity from which the correction factor is
1076                         exp(tau*ZD)
1077                         where ZD is the zenith-distance
1078            insitu:      if False a new scantable is returned.
1079                         Otherwise, the scaling is done in-situ
1080                         The default is taken from .asaprc (False)
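        Example (illustrative; the opacity value is arbitrary):
            corrected = scan.opacity(0.083, insitu=False)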
1081        """
1082        if insitu is None: insitu = rcParams['insitu']
1083        self._math._setinsitu(insitu)
1084        varlist = vars()
1085        s = scantable(self._math._opacity(self, tau))
1086        s._add_history("opacity", varlist)
1087        print_log()
1088        if insitu: self._assign(s)
1089        else: return s
1090
1091    def bin(self, width=5, insitu=None):
1092        """
1093        Return a scan where all spectra have been binned up.
1094        Parameters:
1095            width:       The bin width (default=5) in pixels
1096            insitu:      if False a new scantable is returned.
1097                         Otherwise, the scaling is done in-situ
1098                         The default is taken from .asaprc (False)
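        Example (illustrative):
            binned = scan.bin(width=10, insitu=False)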
1099        """
1100        if insitu is None: insitu = rcParams['insitu']
1101        self._math._setinsitu(insitu)
1102        varlist = vars()
1103        s = scantable(self._math._bin(self, width))
1104        s._add_history("bin", varlist)
1105        print_log()
1106        if insitu: self._assign(s)
1107        else: return s
1108
1109
1110    def resample(self, width=5, method='cubic', insitu=None):
1111        """
1112        Return a scan where all spectra have been resampled (regridded).
1113       
1114        Parameters:
1115            width:       The bin width (default=5) in pixels
1116            method:      Interpolation method used for the resampling.
1117                         Values are  "nearest", "linear", "cubic" (default)
1118                         and "spline"
1119            insitu:      if False a new scantable is returned.
1120                         Otherwise, the scaling is done in-situ
1121                         The default is taken from .asaprc (False)
1122        """
1123        if insitu is None: insitu = rcParams['insitu']
1124        self._math._setinsitu(insitu)
1125        varlist = vars()
1126        s = scantable(self._math._resample(self, method, width))
1127        s._add_history("resample", varlist)
1128        print_log()
1129        if insitu: self._assign(s)
1130        else: return s
1131
1132
1133    def average_pol(self, mask=None, weight='none'):
1134        """
1135        Average the Polarisations together.
1136        Parameters:
1137            mask:        An optional mask defining the region, where the
1138                         averaging will be applied. The output will have all
1139                         specified points masked.
1140            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1141                         weighted), or 'tsys' (1/Tsys**2 weighted)
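        Example (illustrative; assumes 'scan' is an existing scantable):
            iscan = scan.average_pol(weight='tsys')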
1142        """
1143        varlist = vars()
1144        if mask is None:
1145            mask = ()
1146        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1147        s._add_history("average_pol", varlist)
1148        print_log()
1149        return s
1150
1151    def average_beam(self, mask=None, weight='none'):
1152        """
1153        Average the Beams together.
1154        Parameters:
1155            mask:        An optional mask defining the region, where the
1156                         averaging will be applied. The output will have all
1157                         specified points masked.
1158            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1159                         weighted), or 'tsys' (1/Tsys**2 weighted)
1160        """
1161        varlist = vars()
1162        if mask is None:
1163            mask = ()
1164        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1165        s._add_history("average_beam", varlist)
1166        print_log()
1167        return s
1168
1169    def convert_pol(self, poltype=None):
1170        """
1171        Convert the data to a different polarisation type.
1172        Parameters:
1173            poltype:    The new polarisation type. Valid types are:
1174                        "linear", "stokes" and "circular"
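        Example (illustrative; assumes the data are linearly polarised):
            stokes = scan.convert_pol('stokes')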
1175        """
1176        varlist = vars()
1177        try:
1178            s = scantable(self._math._convertpol(self, poltype))
1179        except RuntimeError, msg:
1180            if rcParams['verbose']:
1181                print msg
1182                return
1183            else:
1184                raise
1185        s._add_history("convert_pol", varlist)
1186        print_log()
1187        return s
1188
1189    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1190        """
1191        Smooth the spectrum by the specified kernel (conserving flux).
1192        Parameters:
1193            kernel:     The type of smoothing kernel. Select from
1194                        'hanning' (default), 'gaussian' and 'boxcar'.
1195                        The first three characters are sufficient.
1196            width:      The width of the kernel in pixels. For hanning this is
1197                        ignored, otherwise it defaults to 5 pixels.
1198                        For 'gaussian' it is the Full Width Half
1199                        Maximum. For 'boxcar' it is the full width.
1200            insitu:     if False a new scantable is returned.
1201                        Otherwise, the scaling is done in-situ
1202                        The default is taken from .asaprc (False)
1203        Example:
1204             none
1205        """
1206        if insitu is None: insitu = rcParams['insitu']
1207        self._math._setinsitu(insitu)
1208        varlist = vars()
1209        s = scantable(self._math._smooth(self, kernel.lower(), width))
1210        s._add_history("smooth", varlist)
1211        print_log()
1212        if insitu: self._assign(s)
1213        else: return s
1214
1215
1216    def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1217        """
1218        Return a scan which has been baselined (all rows) by a polynomial.
1219        Parameters:
1220            mask:       an optional mask
1221            order:      the order of the polynomial (default is 0)
1222            plot:       plot the fit and the residual. In this case each
1223                        individual fit has to be approved, by typing 'y'
1224                        or 'n'
1225            insitu:     if False a new scantable is returned.
1226                        Otherwise, the scaling is done in-situ
1227                        The default is taken from .asaprc (False)
1228        Example:
1229            # return a scan baselined by a third order polynomial,
1230            # not using a mask
1231            bscan = scan.poly_baseline(order=3)
1232        """
1233        if insitu is None: insitu = rcParams['insitu']
1234        varlist = vars()
1235        if mask is None:
1236            mask = [True for i in xrange(self.nchan(-1))]
1237        from asap.asapfitter import fitter
1238        try:
1239            f = fitter()
1240            f.set_scan(self, mask)
1241            f.set_function(poly=order)
1242            s = f.auto_fit(insitu, plot=plot)
1243            s._add_history("poly_baseline", varlist)
1244            print_log()
1245            if insitu: self._assign(s)
1246            else: return s
1247        except RuntimeError:
1248            msg = "The fit failed, possibly because it didn't converge."
1249            if rcParams['verbose']:
1250                print msg
1251                return
1252            else:
1253                raise RuntimeError(msg)
1254
1255
1256    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1257                           threshold=3, chan_avg_limit=1, plot=False,
1258                           insitu=None):
1259        """
1260        Return a scan which has been baselined (all rows) by a polynomial.
1261        Spectral lines are detected first using linefinder and masked out
1262        to avoid them affecting the baseline solution.
1263
1264        Parameters:
1265            mask:       an optional mask retrieved from scantable
1266            edge:       an optional number of channels to drop at
1267                        the edge of spectrum. If only one value is
1268                        specified, the same number will be dropped from
1269                        both sides of the spectrum. Default is to keep
1270                        all channels. Nested tuples represent individual
1271                        edge selection for different IFs (as the number of spectral
1272                        channels can differ)
1273            order:      the order of the polynomial (default is 0)
1274            threshold:  the threshold used by line finder. It is better to
1275                        keep it large as only strong lines affect the
1276                        baseline solution.
1277            chan_avg_limit:
1278                        a maximum number of consecutive spectral channels to
1279                        average during the search of weak and broad lines.
1280                        The default is no averaging (and no search for weak
1281                        lines). If such lines can affect the fitted baseline
1282                        (e.g. a high order polynomial is fitted), increase this
1283                        parameter (usually values up to 8 are reasonable). Most
1284                        users of this method should find the default value
1285                        sufficient.
1286            plot:       plot the fit and the residual. In this case each
1287                        individual fit has to be approved, by typing 'y'
1288                        or 'n'
1289            insitu:     if False a new scantable is returned.
1290                        Otherwise, the scaling is done in-situ
1291                        The default is taken from .asaprc (False)
1292
1293        Example:
1294            scan2=scan.auto_poly_baseline(order=7)
1295        """
1296        if insitu is None: insitu = rcParams['insitu']
1297        varlist = vars()
1298        from asap.asapfitter import fitter
1299        from asap.asaplinefind import linefinder
1300        from asap import _is_sequence_or_number as _is_valid
1301
1302        # check whether edge is set up for each IF individually
1303        individualedge = False
1304        if len(edge) > 1:
1305            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1306                individualedge = True
1307
1308        if not _is_valid(edge, int) and not individualedge:
1309            raise ValueError, "Parameter 'edge' has to be an integer or a \
1310            pair of integers specified as a tuple. Nested tuples are allowed \
1311            to make individual selection for different IFs."
1312
1313        curedge = (0, 0)
1314        if individualedge:
1315            for edgepar in edge:
1316                if not _is_valid(edgepar, int):
1317                    raise ValueError, "Each element of the 'edge' tuple has \
1318                                       to be a pair of integers or an integer."
1319        else:
1320            curedge = edge
1321
1322        # setup fitter
1323        f = fitter()
1324        f.set_function(poly=order)
1325
1326        # setup line finder
1327        fl = linefinder()
1328        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1329
1330        if not insitu:
1331            workscan = self.copy()
1332        else:
1333            workscan = self
1334
1335        fl.set_scan(workscan)
1336
1337        rows = range(workscan.nrow())
1338        asaplog.push("Processing:")
1339        for r in rows:
1340            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1341                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1342                 workscan.getpol(r), workscan.getcycle(r))
1343            asaplog.push(msg, False)
1344
1345            # figure out edge parameter
1346            if individualedge:
1347                if len(edge) <= workscan.getif(r):
1348                    raise RuntimeError, "Number of edge elements appears to " \
1349                                        "be less than the number of IFs"
1350                curedge = edge[workscan.getif(r)]
1351
1352            # run the line finder, then fit and subtract the baseline
1353            fl.find_lines(r, mask, curedge)
1354            f.set_scan(workscan, fl.get_mask())
1355            f.x = workscan._getabcissa(r)
1356            f.y = workscan._getspectrum(r)
1357            f.data = None
1358            f.fit()
1359            x = f.get_parameters()
1360            if plot:
1361                f.plot(residual=True)
1362                x = raw_input("Accept fit ( [y]/n ): ")
1363                if x.upper() == 'N':
1364                    continue
1365            workscan._setspectrum(f.fitter.getresidual(), r)
1366        if plot:
1367            f._p.unmap()
1368            f._p = None
1369        workscan._add_history("auto_poly_baseline", varlist)
1370        if insitu:
1371            self._assign(workscan)
1372        else:
1373            return workscan
1374
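    # Illustrative usage sketch for auto_poly_baseline (not part of the original
    # docstring): 'scan' is an assumed scantable with two IFs of different
    # widths, so a nested 'edge' tuple supplies one edge pair per IF, as
    # described above.
    #
    #     scan2 = scan.auto_poly_baseline(edge=((50, 50), (80, 80)),
    #                                     order=3, threshold=5)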
1375    def rotate_linpolphase(self, angle):
1376        """
1377        Rotate the phase of the complex polarization O=Q+iU correlation.
1378        This is always done in situ in the raw data.  So if you call this
1379        function more than once then each call rotates the phase further.
1380        Parameters:
1381            angle:   The angle (degrees) to rotate (add) by.
1382        Examples:
1383            scan.rotate_linpolphase(2.3)
1384        """
1385        varlist = vars()
1386        self._math._rotate_linpolphase(self, angle)
1387        self._add_history("rotate_linpolphase", varlist)
1388        print_log()
1389        return
1390
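    # Note (illustrative): because the rotation is applied in situ, repeated
    # calls accumulate, e.g. two calls with 2.3 degrees amount to a single
    # 4.6 degree rotation.
    #
    #     scan.rotate_linpolphase(2.3)
    #     scan.rotate_linpolphase(2.3)   # total applied rotation is now 4.6 deg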
1391
1392    def rotate_xyphase(self, angle):
1393        """
1394        Rotate the phase of the XY correlation.  This is always done in situ
1395        in the data.  So if you call this function more than once
1396        then each call rotates the phase further.
1397        Parameters:
1398            angle:   The angle (degrees) to rotate (add) by.
1399        Examples:
1400            scan.rotate_xyphase(2.3)
1401        """
1402        varlist = vars()
1403        self._math._rotate_xyphase(self, angle)
1404        self._add_history("rotate_xyphase", varlist)
1405        print_log()
1406        return
1407
1408    def swap_linears(self):
1409        """
1410        Swap the linear polarisations XX and YY, or more generally the
1411        first two polarisations, as this also works for circulars.
1412        """
1413        varlist = vars()
1414        self._math._swap_linears(self)
1415        self._add_history("swap_linears", varlist)
1416        print_log()
1417        return
1418
1419    def invert_phase(self):
1420        """
1421        Invert the phase of the complex polarisation
1422        """
1423        varlist = vars()
1424        self._math._invert_phase(self)
1425        self._add_history("invert_phase", varlist)
1426        print_log()
1427        return
1428
1429    def add(self, offset, insitu=None):
1430        """
1431        Return a scan where all spectra have the offset added
1432        Parameters:
1433            offset:      the offset
1434            insitu:      if False a new scantable is returned.
1435                         Otherwise, the addition is done in-situ
1436                         The default is taken from .asaprc (False)
1437        """
1438        if insitu is None: insitu = rcParams['insitu']
1439        self._math._setinsitu(insitu)
1440        varlist = vars()
1441        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1442        s._add_history("add", varlist)
1443        print_log()
1444        if insitu:
1445            self._assign(s)
1446        else:
1447            return s
1448
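    # Usage sketch for add() (illustrative, assuming an existing scantable
    # 'scan'): with insitu=False a new, offset scantable is returned and
    # 'scan' is left untouched; with insitu=True 'scan' itself is modified
    # and nothing is returned.
    #
    #     shifted = scan.add(1.5, insitu=False)   # new scantable
    #     scan.add(1.5, insitu=True)              # modifies 'scan' in place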
1449    def scale(self, factor, tsys=True, insitu=None):
1450        """
1451        Return a scan where all spectra are scaled by the given 'factor'
1452        Parameters:
1453            factor:      the scaling factor
1454            insitu:      if False a new scantable is returned.
1455                         Otherwise, the scaling is done in-situ
1456                         The default is taken from .asaprc (False)
1457            tsys:        if True (default) then apply the operation to Tsys
1458                         as well as the data
1459        """
1460        if insitu is None: insitu = rcParams['insitu']
1461        self._math._setinsitu(insitu)
1462        varlist = vars()
1463        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1464        s._add_history("scale", varlist)
1465        print_log()
1466        if insitu:
1467            self._assign(s)
1468        else:
1469            return s
1470
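    # Usage sketch for scale() (illustrative): tsys=False multiplies only the
    # spectra and leaves Tsys untouched; by default both are scaled.
    #
    #     calibrated = scan.scale(1.25, tsys=False, insitu=False)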
1471    def auto_quotient(self, preserve=True, mode='paired'):
1472        """
1473        This function allows you to build quotients automatically.
1474        It assumes the observation to have the same number of
1475        "ons" and "offs"
1476        Parameters:
1477            preserve:       you can preserve (default) the continuum or
1478                            remove it.  The equations used are
1479                            preserve: Output = Toff * (on/off) - Toff
1480                            remove:   Output = Toff * (on/off) - Ton
1481            mode:           the on/off detection mode
1482                            'paired' (default)
1483                            identifies 'off' scans by the
1484                            trailing '_R' (Mopra/Parkes) or
1485                            '_e'/'_w' (Tid) and matches
1486                            on/off pairs from the observing pattern
1487                            'time'
1488                            finds the closest off in time
1489
1490        """
1491        modes = ["time", "paired"]
1492        if mode not in modes:
1493            msg = "please provide a valid mode. Valid modes are %s" % (modes)
1494            raise ValueError(msg)
1495        varlist = vars()
1496        s = None
1497        if mode.lower() == "paired":
1498            basesel = self.get_selection()
1499            sel = selector()+basesel
1500            sel.set_query("SRCTYPE==1")
1501            self.set_selection(sel)
1502            offs = self.copy()
1503            sel.set_query("SRCTYPE==0")
1504            self.set_selection(sel)
1505            ons = self.copy()
1506            s = scantable(self._math._quotient(ons, offs, preserve))
1507            self.set_selection(basesel)
1508        elif mode.lower() == "time":
1509            s = scantable(self._math._auto_quotient(self, mode, preserve))
1510        s._add_history("auto_quotient", varlist)
1511        print_log()
1512        return s
1513
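    # Usage sketch for auto_quotient() (illustrative, assuming the scantable
    # contains matching on/off scans):
    #
    #     q1 = scan.auto_quotient()                             # 'paired' mode, continuum preserved
    #     q2 = scan.auto_quotient(preserve=False, mode='time')  # nearest 'off' in time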
1514    def mx_quotient(self, mask = None, weight='median', preserve=True):
1515        """
1516        Form a quotient using "off" beams when observing in "MX" mode.
1517        Parameters:
1518            mask:           an optional mask to be used when weight == 'stddev'
1519            weight:         How to average the off beams.  Default is 'median'.
1520            preserve:       you can preserve (default) the continuum or
1521                            remove it.  The equations used are
1522                            preserve: Output = Toff * (on/off) - Toff
1523                            remove:   Output = Toff * (on/off) - Ton
1524        """
1525        if mask is None: mask = ()
1526        varlist = vars()
1527        on = scantable(self._math._mx_extract(self, 'on'))
1528        preoff = scantable(self._math._mx_extract(self, 'off'))
1529        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1530        from asapmath import quotient
1531        q = quotient(on, off, preserve)
1532        q._add_history("mx_quotient", varlist)
1533        print_log()
1534        return q
1535
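    # Usage sketch for mx_quotient() (illustrative): the off beams are
    # averaged with the default 'median' weighting, and preserve=False
    # removes the continuum as described in the docstring.
    #
    #     q = scan.mx_quotient(preserve=False)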
1536    def freq_switch(self, insitu=None):
1537        """
1538        Apply frequency switching to the data.
1539        Parameters:
1540            insitu:      if False a new scantable is returned.
1541                         Otherwise, the switching is done in-situ
1542                         The default is taken from .asaprc (False)
1543        Example:
1544            none
1545        """
1546        if insitu is None: insitu = rcParams['insitu']
1547        self._math._setinsitu(insitu)
1548        varlist = vars()
1549        s = scantable(self._math._freqswitch(self))
1550        s._add_history("freq_switch", varlist)
1551        print_log()
1552        if insitu: self._assign(s)
1553        else: return s
1554
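    # Usage sketch for freq_switch() (illustrative, since the docstring gives
    # no example); 'scan' is assumed to hold frequency-switched data:
    #
    #     fsw = scan.freq_switch(insitu=False)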
1555    def recalc_azel(self):
1556        """
1557        Recalculate the azimuth and elevation for each position.
1558        Parameters:
1559            none
1560        Example:
1561        """
1562        varlist = vars()
1563        self._recalcazel()
1564        self._add_history("recalc_azel", varlist)
1565        print_log()
1566        return
1567
1568    def __add__(self, other):
1569        varlist = vars()
1570        s = None
1571        if isinstance(other, scantable):
1572            s = scantable(self._math._binaryop(self, other, "ADD"))
1573        elif isinstance(other, float):
1574            s = scantable(self._math._unaryop(self, other, "ADD", False))
1575        else:
1576            raise TypeError("Other input is not a scantable or float value")
1577        s._add_history("operator +", varlist)
1578        print_log()
1579        return s
1580
1581    def __sub__(self, other):
1582        """
1583        implicit on all axes and on Tsys
1584        """
1585        varlist = vars()
1586        s = None
1587        if isinstance(other, scantable):
1588            s = scantable(self._math._binaryop(self, other, "SUB"))
1589        elif isinstance(other, float):
1590            s = scantable(self._math._unaryop(self, other, "SUB", False))
1591        else:
1592            raise TypeError("Other input is not a scantable or float value")
1593        s._add_history("operator -", varlist)
1594        print_log()
1595        return s
1596
1597    def __mul__(self, other):
1598        """
1599        implicit on all axes and on Tsys
1600        """
1601        varlist = vars()
1602        s = None
1603        if isinstance(other, scantable):
1604            s = scantable(self._math._binaryop(self, other, "MUL"))
1605        elif isinstance(other, float):
1606            s = scantable(self._math._unaryop(self, other, "MUL", False))
1607        else:
1608            raise TypeError("Other input is not a scantable or float value")
1609        s._add_history("operator *", varlist)
1610        print_log()
1611        return s
1612
1613
1614    def __div__(self, other):
1615        """
1616        implicit on all axes and on Tsys
1617        """
1618        varlist = vars()
1619        s = None
1620        if isinstance(other, scantable):
1621            s = scantable(self._math._binaryop(self, other, "DIV"))
1622        elif isinstance(other, float):
1623            if other == 0.0:
1624                raise ZeroDivisionError("Dividing by zero is not recommended")
1625            s = scantable(self._math._unaryop(self, other, "DIV", False))
1626        else:
1627            raise TypeError("Other input is not a scantable or float value")
1628        s._add_history("operator /", varlist)
1629        print_log()
1630        return s
1631
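    # Usage sketch for the arithmetic operators defined above (illustrative):
    # the right-hand operand may be another scantable or a python float;
    # other types raise a TypeError.
    #
    #     summed  = scan_a + scan_b    # scantable + scantable
    #     shifted = scan_a + 1.0       # scantable + float
    #     ratio   = scan_a / scan_b    # element-wise quotient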
1632    def get_fit(self, row=0):
1633        """
1634        Print or return the stored fits for a row in the scantable
1635        Parameters:
1636            row:    the row to which the fit has been applied.
1637        """
1638        if row >= self.nrow():
1639            return
1640        from asap.asapfit import asapfit
1641        fit = asapfit(self._getfit(row))
1642        if rcParams['verbose']:
1643            print fit
1644            return
1645        else:
1646            return fit.as_dict()
1647
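    # Usage sketch for get_fit() (illustrative): with rcParams['verbose'] set
    # the stored fit is printed, otherwise a dictionary representation is
    # returned.
    #
    #     fitinfo = scan.get_fit(row=0)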
1648    def _add_history(self, funcname, parameters):
1649        # create date
1650        sep = "##"
1651        from datetime import datetime
1652        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1653        hist = dstr+sep
1654        hist += funcname+sep
1655        if parameters.has_key('self'): del parameters['self']
1656        for k, v in parameters.iteritems():
1657            if type(v) is dict:
1658                for k2, v2 in v.iteritems():
1659                    hist += k2
1660                    hist += "="
1661                    if isinstance(v2, scantable):
1662                        hist += 'scantable'
1663                    elif k2 == 'mask':
1664                        if isinstance(v2, list) or isinstance(v2, tuple):
1665                            hist += str(self._zip_mask(v2))
1666                        else:
1667                            hist += str(v2)
1668                    else:
1669                        hist += str(v2)
1670            else:
1671                hist += k
1672                hist += "="
1673                if isinstance(v, scantable):
1674                    hist += 'scantable'
1675                elif k == 'mask':
1676                    if isinstance(v, list) or isinstance(v, tuple):
1677                        hist += str(self._zip_mask(v))
1678                    else:
1679                        hist += str(v)
1680                else:
1681                    hist += str(v)
1682            hist += sep
1683        hist = hist[:-2] # remove trailing '##'
1684        self._addhistory(hist)
1685
1686
1687    def _zip_mask(self, mask):
1688        mask = list(mask)
1689        i = 0
1690        segments = []
1691        while mask[i:].count(1):
1692            i += mask[i:].index(1)
1693            if mask[i:].count(0):
1694                j = i + mask[i:].index(0)
1695            else:
1696                j = len(mask)
1697            segments.append([i, j])
1698            i = j
1699        return segments
1700
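    # Worked example for _zip_mask (illustrative): a channel mask such as
    #     [0, 1, 1, 0, 1]
    # is compressed to a list of [start, end) segments
    #     [[1, 3], [4, 5]]
    # which is the compact form written into history entries by _add_history.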
1701    def _get_ordinate_label(self):
1702        fu = "("+self.get_fluxunit()+")"
1703        import re
1704        lbl = "Intensity"
1705        if re.match(".K.", fu):
1706            lbl = "Brightness Temperature "+ fu
1707        elif re.match(".Jy.", fu):
1708            lbl = "Flux density "+ fu
1709        return lbl
1710
1711    def _check_ifs(self):
1712        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1713        nchans = filter(lambda t: t > 0, nchans)
1714        return (sum(nchans)/len(nchans) == nchans[0])
1715
1716    def _fill(self, names, unit, average):
1717        import os
1718        from asap._asap import stfiller
1719        first = True
1720        fullnames = []
1721        for name in names:
1722            name = os.path.expandvars(name)
1723            name = os.path.expanduser(name)
1724            if not os.path.exists(name):
1725                msg = "File '%s' does not exist" % (name)
1726                if rcParams['verbose']:
1727                    asaplog.push(msg)
1728                    print asaplog.pop().strip()
1729                    return
1730                raise IOError(msg)
1731            fullnames.append(name)
1732        if average:
1733            asaplog.push('Auto averaging integrations')
1734        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1735        for name in fullnames:
1736            tbl = Scantable(stype)
1737            r = stfiller(tbl)
1738            msg = "Importing %s..." % (name)
1739            asaplog.push(msg, False)
1740            print_log()
1741            r._open(name, -1, -1)
1742            r._read()
1743            #tbl = r._getdata()
1744            if average:
1745                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1746                #tbl = tbl2
1747            if not first:
1748                tbl = self._math._merge([self, tbl])
1749                #tbl = tbl2
1750            Scantable.__init__(self, tbl)
1751            r._close()
1752            del r, tbl
1753            first = False
1754        if unit is not None:
1755            self.set_fluxunit(unit)
1756        self.set_freqframe(rcParams['scantable.freqframe'])
1757