source: trunk/python/scantable.py @ 1192

Last change on this file since 1192 was 1192, checked in by mar637, 18 years ago

added lag_flag - flagging of frequencies in fft space

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 61.8 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32        """
33        if average is None:
34            average = rcParams['scantable.autoaverage']
35        #varlist = vars()
36        from asap._asap import stmath
37        self._math = stmath()
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40        else:
41            if isinstance(filename, str):
42                import os.path
43                filename = os.path.expandvars(filename)
44                filename = os.path.expanduser(filename)
45                if not os.path.exists(filename):
46                    s = "File '%s' not found." % (filename)
47                    if rcParams['verbose']:
48                        asaplog.push(s)
49                        print asaplog.pop().strip()
50                        return
51                    raise IOError(s)
52                if os.path.isdir(filename) \
53                    and not os.path.exists(filename+'/table.f1'):
54                    # crude check if asap table
55                    if os.path.exists(filename+'/table.info'):
56                        ondisk = rcParams['scantable.storage'] == 'disk'
57                        Scantable.__init__(self, filename, ondisk)
58                        if unit is not None:
59                            self.set_fluxunit(unit)
60                        self.set_freqframe(rcParams['scantable.freqframe'])
61                    else:
62                        msg = "The given file '%s' is not a valid " \
63                              "asap table." % (filename)
64                        if rcParams['verbose']:
65                            print msg
66                            return
67                        else:
68                            raise IOError(msg)
69                else:
70                    self._fill([filename], unit, average)
71            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72                  and isinstance(filename[-1], str):
73                self._fill(filename, unit, average)
74        print_log()
75
76    def save(self, name=None, format=None, overwrite=False):
77        """
78        Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
79        Image FITS or MS2 format.
80        Parameters:
81            name:        the name of the outputfile. For format "ASCII"
82                         this is the root file name (data in 'name'.txt
83                         and header in 'name'_header.txt)
84            format:      an optional file format. Default is ASAP.
85                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
86                                       'SDFITS' (save as SDFITS file)
87                                       'ASCII' (saves as ascii text file)
88                                       'MS2' (saves as an aips++
89                                              MeasurementSet V2)
90            overwrite:   If the file should be overwritten if it exists.
91                         The default False is to return with warning
92                         without writing the output. USE WITH CARE.
93        Example:
94            scan.save('myscan.asap')
95            scan.save('myscan.sdfits', 'SDFITS')
96        """
97        from os import path
98        if format is None: format = rcParams['scantable.save']
99        suffix = '.'+format.lower()
100        if name is None or name == "":
101            name = 'scantable'+suffix
102            msg = "No filename given. Using default name %s..." % name
103            asaplog.push(msg)
104        name = path.expandvars(name)
105        if path.isfile(name) or path.isdir(name):
106            if not overwrite:
107                msg = "File %s exists." % name
108                if rcParams['verbose']:
109                    print msg
110                    return
111                else:
112                    raise IOError(msg)
113        format2 = format.upper()
114        if format2 == 'ASAP':
115            self._save(name)
116        else:
117            from asap._asap import stwriter as stw
118            writer = stw(format2)
119            writer.write(self, name)
120        print_log()
121        return
122
123    def copy(self):
124        """
125        Return a copy of this scantable.
126        Parameters:
127            none
128        Example:
129            copiedscan = scan.copy()
130        """
131        sd = scantable(Scantable._copy(self))
132        return sd
133
134    def drop_scan(self, scanid=None):
135        """
136        Return a new scantable where the specified scan number(s) has(have)
137        been dropped.
138        Parameters:
139            scanid:    a (list of) scan number(s)
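139        Example (an illustrative sketch; the scan numbers are arbitrary):
            # return a new scantable without scans 2 and 3
            cleaned = scan.drop_scan([2, 3])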
140        """
141        from asap import _is_sequence_or_number as _is_valid
142        from asap import _to_list
143        from asap import unique
144        if not _is_valid(scanid):
145            if rcParams['verbose']:
146                print "Please specify a scanno to drop from the scantable"
147                return
148            else:
149                raise RuntimeError("No scan given")
150        try:
151            scanid = _to_list(scanid)
152            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153            for sid in scanid: allscans.remove(sid)
154            if len(allscans) == 0:
155                raise ValueError("Can't remove all scans")
156        except ValueError:
157            if rcParams['verbose']:
158                print "Couldn't find any match."
159                return
160            else: raise
161        try:
162            bsel = self.get_selection()
163            sel = selector()
164            sel.set_scans(allscans)
165            self.set_selection(bsel+sel)
166            scopy = self._copy()
167            self.set_selection(bsel)
168            return scantable(scopy)
169        except RuntimeError:
170            if rcParams['verbose']:
171                print "Couldn't find any match."
172            else:
173                raise
174
175
176    def get_scan(self, scanid=None):
177        """
178        Return a specific scan (by scanno) or collection of scans (by
179        source name) in a new scantable.
180        Parameters:
181            scanid:    a (list of) scanno or a source name, unix-style
182                       patterns are accepted for source name matching, e.g.
183                       '*_R' gets all 'ref' scans
184        Example:
185            # get all scans containing the source '323p459'
186            newscan = scan.get_scan('323p459')
187            # get all 'off' scans
188            refscans = scan.get_scan('*_R')
189            # get a subset of scans by scanno (as listed in scan.summary())
190            newscan = scan.get_scan([0, 2, 7, 10])
191        """
192        if scanid is None:
193            if rcParams['verbose']:
194                print "Please specify a scan no or name to " \
195                      "retrieve from the scantable"
196                return
197            else:
198                raise RuntimeError("No scan given")
199
200        try:
201            bsel = self.get_selection()
202            sel = selector()
203            if type(scanid) is str:
204                sel.set_name(scanid)
205                self.set_selection(bsel+sel)
206                scopy = self._copy()
207                self.set_selection(bsel)
208                return scantable(scopy)
209            elif type(scanid) is int:
210                sel.set_scans([scanid])
211                self.set_selection(bsel+sel)
212                scopy = self._copy()
213                self.set_selection(bsel)
214                return scantable(scopy)
215            elif type(scanid) is list:
216                sel.set_scans(scanid)
217                self.set_selection(sel)
218                scopy = self._copy()
219                self.set_selection(bsel)
220                return scantable(scopy)
221            else:
222                msg = "Illegal scanid type, use 'int' or 'list' of ints."
223                if rcParams['verbose']:
224                    print msg
225                else:
226                    raise TypeError(msg)
227        except RuntimeError:
228            if rcParams['verbose']: print "Couldn't find any match."
229            else: raise
230
231    def __str__(self):
232        return Scantable._summary(self, True)
233
234    def summary(self, filename=None):
235        """
236        Print a summary of the contents of this scantable.
237        Parameters:
238            filename:    the name of a file to write the output to
239                         Default - no file output
242        """
243        info = Scantable._summary(self, True)
244        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
245        if filename is not None:
246            if filename == "":
247                filename = 'scantable_summary.txt'
248            from os.path import expandvars, isdir
249            filename = expandvars(filename)
250            if not isdir(filename):
251                data = open(filename, 'w')
252                data.write(info)
253                data.close()
254            else:
255                msg = "Illegal file name '%s'." % (filename)
256                if rcParams['verbose']:
257                    print msg
258                else:
259                    raise IOError(msg)
260        if rcParams['verbose']:
261            try:
262                from IPython.genutils import page as pager
263            except ImportError:
264                from pydoc import pager
265            pager(info)
266        else:
267            return info
268
269
270    def get_selection(self):
271        """
272        Get the selection object currently set on this scantable.
273        Parameters:
274            none
275        Example:
276            sel = scan.get_selection()
277            sel.set_ifs(0)              # select IF 0
278            scan.set_selection(sel)     # apply modified selection
279        """
280        return selector(self._getselection())
281
282    def set_selection(self, selection=selector()):
283        """
284        Select a subset of the data. All following operations on this scantable
285        are only applied to this selection.
286        Parameters:
287            selection:    a selector object (default unset the selection)
288        Examples:
289            sel = selector()         # create a selection object
290            sel.set_scans([0, 3])     # select SCANNO 0 and 3
291            scan.set_selection(sel)  # set the selection
292            scan.summary()           # will only print summary of scanno 0 and 3
293            scan.set_selection()     # unset the selection
294        """
295        self._setselection(selection)
296
297    def stats(self, stat='stddev', mask=None):
298        """
299        Determine the specified statistic of the current beam/if/pol
300        Takes a 'mask' as an optional parameter to specify which
301        channels should be excluded.
302        Parameters:
303            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
304                     'var', 'stddev', 'avdev', 'rms', 'median'
305            mask:    an optional mask specifying where the statistic
306                     should be determined.
307        Example:
308            scan.set_unit('channel')
309            msk = scan.create_mask([100, 200], [500, 600])
310            scan.stats(stat='mean', mask=msk)
311        """
312        if mask is None:
313            mask = []
314        axes = ['Beam', 'IF', 'Pol', 'Time']
315        if not self._check_ifs():
316            raise ValueError("Cannot apply mask as the IFs have different "
317                             "number of channels. Please use setselection() "
318                             "to select individual IFs")
319
320        statvals = self._math._stats(self, mask, stat)
321        out = ''
322        axes = []
323        for i in range(self.nrow()):
324            axis = []
325            axis.append(self.getscan(i))
326            axis.append(self.getbeam(i))
327            axis.append(self.getif(i))
328            axis.append(self.getpol(i))
329            axis.append(self.getcycle(i))
330            axes.append(axis)
331            tm = self._gettime(i)
332            src = self._getsourcename(i)
333            out += 'Scan[%d] (%s) ' % (axis[0], src)
334            out += 'Time[%s]:\n' % (tm)
335            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
336            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
337            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
338            out += '= %3.3f\n' % (statvals[i])
339            out +=  "--------------------------------------------------\n"
340
341        if rcParams['verbose']:
342            print "--------------------------------------------------"
343            print " ", stat
344            print "--------------------------------------------------"
345            print out
346        retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
347                   'axes' : axes,
348                   'data': statvals}
349        return retval
350
351    def stddev(self, mask=None):
352        """
353        Determine the standard deviation of the current beam/if/pol
354        Takes a 'mask' as an optional parameter to specify which
355        channels should be excluded.
356        Parameters:
357            mask:    an optional mask specifying where the standard
358                     deviation should be determined.
359
360        Example:
361            scan.set_unit('channel')
362            msk = scan.create_mask([100, 200], [500, 600])
363            scan.stddev(mask=msk)
364        """
365        return self.stats(stat='stddev', mask=mask)
366
367
368    def column_names(self):
369        """
370        Return a list of column names, which can be used for selection.
371        """
372        return list(Scantable.column_names(self))
373
374    def get_tsys(self):
375        """
376        Return the System temperatures.
377        Parameters:
378
379        Returns:
380            a list of Tsys values for the current selection
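380        Example (minimal sketch):
            tsys = scan.get_tsys()    # one value per row in the current selection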
381        """
382
383        return self._row_callback(self._gettsys, "Tsys")
384
385    def _row_callback(self, callback, label):
386        axes = []
387        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
388        out = ""
389        outvec = []
390        for i in range(self.nrow()):
391            axis = []
392            axis.append(self.getscan(i))
393            axis.append(self.getbeam(i))
394            axis.append(self.getif(i))
395            axis.append(self.getpol(i))
396            axis.append(self.getcycle(i))
397            axes.append(axis)
398            tm = self._gettime(i)
399            src = self._getsourcename(i)
400            out += 'Scan[%d] (%s) ' % (axis[0], src)
401            out += 'Time[%s]:\n' % (tm)
402            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
403            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
404            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
405            outvec.append(callback(i))
406            out += '= %3.3f\n' % (outvec[i])
407            out +=  "--------------------------------------------------\n"
408        if rcParams['verbose']:
409            print "--------------------------------------------------"
410            print " %s" % (label)
411            print "--------------------------------------------------"
412            print out
413        # disabled because the vector seems more useful
414        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415        return outvec
416
417    def _get_column(self, callback, row=-1):
418        """
419        """
420        if row == -1:
421            return [callback(i) for i in range(self.nrow())]
422        else:
423            if  0 <= row < self.nrow():
424                return callback(row)
425
426
427    def get_time(self, row=-1):
428        """
429        Get a list of time stamps for the observations.
430        Return a string for each integration in the scantable.
431        Parameters:
432            row:    row no of integration. Default -1 return all rows
433        Example:
434            none
435        """
436        from time import strptime
437        from datetime import datetime
438        times = self._get_column(self._gettime, row)
439        format = "%Y/%m/%d/%H:%M:%S"
440        if isinstance(times, list):
441            return [datetime(*strptime(i, format)[:6]) for i in times]
442        else:
443            return datetime(*strptime(times, format)[:6])
444
445    def get_sourcename(self, row=-1):
446        """
447        Get a list of source names for the observations.
448        Return a string for each integration in the scantable.
449        Parameters:
450            row:    row no of integration. Default -1 return all rows
451        Example:
452            none
453        """
454        return self._get_column(self._getsourcename, row)
455
456    def get_elevation(self, row=-1):
457        """
458        Get a list of elevations for the observations.
459        Return a float for each integration in the scantable.
460        Parameters:
461            row:    row no of integration. Default -1 return all rows
462        Example:
463            none
464        """
465        return self._get_column(self._getelevation, row)
466
467    def get_azimuth(self, row=-1):
468        """
469        Get a list of azimuths for the observations.
470        Return a float for each integration in the scantable.
471        Parameters:
472            row:    row no of integration. Default -1 return all rows
473        Example:
474            none
475        """
476        return self._get_column(self._getazimuth, row)
477
478    def get_parangle(self, row=-1):
479        """
480        Get a list of parallactic angles for the observations.
481        Return a float for each integration in the scantable.
482        Parameters:
483            row:    row no of integration. Default -1 return all rows
484        Example:
485            none
486        """
487        return self._get_column(self._getparangle, row)
488
489    def get_direction(self, row=-1):
490        """
491        Get a list of Positions on the sky (direction) for the observations.
492        Return a float for each integration in the scantable.
493        Parameters:
494            row:    row no of integration. Default -1 return all rows
495        Example:
496            none
497        """
498        return self._get_column(self._getdirection, row)
499
500    def set_unit(self, unit='channel'):
501        """
502        Set the unit for all following operations on this scantable
503        Parameters:
504            unit:    optional unit, default is 'channel'
505                     one of '*Hz', 'km/s', 'channel', ''
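505        Example (illustrative):
            scan.set_unit('km/s')    # all following operations work in velocity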
506        """
507        varlist = vars()
508        if unit in ['', 'pixel', 'channel']:
509            unit = ''
510        inf = list(self._getcoordinfo())
511        inf[0] = unit
512        self._setcoordinfo(inf)
513        self._add_history("set_unit", varlist)
514
515    def set_instrument(self, instr):
516        """
517        Set the instrument for subsequent processing
518        Parameters:
519            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
520                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
521        """
522        self._setInstrument(instr)
523        self._add_history("set_instrument", vars())
524        print_log()
525
526    def set_feedtype(self, feedtype):
527        """
528        Overwrite the feed type, which might not be set correctly.
529        Parameters:
530            feedtype:     'linear' or 'circular'
531        """
532        self._setfeedtype(feedtype)
533        self._add_history("set_feedtype", vars())
534        print_log()
535
536    def set_doppler(self, doppler='RADIO'):
537        """
538        Set the doppler for all following operations on this scantable.
539        Parameters:
540            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
541        """
542        varlist = vars()
543        inf = list(self._getcoordinfo())
544        inf[2] = doppler
545        self._setcoordinfo(inf)
546        self._add_history("set_doppler", vars())
547        print_log()
548
549    def set_freqframe(self, frame=None):
550        """
551        Set the frame type of the Spectral Axis.
552        Parameters:
553            frame:   an optional frame type, default 'LSRK'. Valid frames are:
554                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
555                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
556        Examples:
557            scan.set_freqframe('BARY')
558        """
559        if frame is None: frame = rcParams['scantable.freqframe']
560        varlist = vars()
561        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
562                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
563
564        if frame in valid:
565            inf = list(self._getcoordinfo())
566            inf[1] = frame
567            self._setcoordinfo(inf)
568            self._add_history("set_freqframe", varlist)
569        else:
570            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
571            if rcParams['verbose']:
572                print msg
573            else:
574                raise TypeError(msg)
575        print_log()
576
577    def set_dirframe(self, frame=""):
578        """
579        Set the frame type of the Direction on the sky.
580        Parameters:
581            frame:   an optional frame type, default ''. Valid frames are:
582                     'J2000', 'B1950', 'GALACTIC'
583        Examples:
584            scan.set_dirframe('GALACTIC')
585        """
586        varlist = vars()
587        try:
588            Scantable.set_dirframe(self, frame)
589        except RuntimeError, msg:
590            if rcParams['verbose']:
591                print msg
592            else:
593                raise
594        self._add_history("set_dirframe", varlist)
595
596    def get_unit(self):
597        """
598        Get the default unit set in this scantable
599        Parameters:
600        Returns:
601            A unit string
602        """
603        inf = self._getcoordinfo()
604        unit = inf[0]
605        if unit == '': unit = 'channel'
606        return unit
607
608    def get_abcissa(self, rowno=0):
609        """
610        Get the abcissa in the current coordinate setup for the currently
611        selected Beam/IF/Pol
612        Parameters:
613            rowno:    an optional row number in the scantable. Default is the
614                      first row, i.e. rowno=0
615        Returns:
616            The abcissa values and its format string (as a dictionary)
617        """
618        abc = self._getabcissa(rowno)
619        lbl = self._getabcissalabel(rowno)
620        print_log()
621        return abc, lbl
622
623    def flag(self, mask=None):
624        """
625        Flag the selected data using an optional channel mask.
626        Parameters:
627            mask:   an optional channel mask, created with create_mask. Default
628                    (no mask) is all channels.
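628        Example (an illustrative sketch; the channel ranges are arbitrary):
            msk = scan.create_mask([100, 200], [1500, 1600])
            scan.flag(msk)    # flag the channels selected by the mask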
629        """
630        varlist = vars()
631        if mask is None:
632            mask = []
633        try:
634            self._flag(mask)
635        except RuntimeError, msg:
636            if rcParams['verbose']:
637                print msg
638                return
639            else: raise
640        self._add_history("flag", varlist)
641
642    def lag_flag(self, frequency, unit="GHz", width=0, insitu=None):
643        """
644        Flag the data in 'lag' space by providing a frequency to remove.
645        Flagged data in the scantable gets set to 0.0 before the fft.
646        No taper is applied.
647        Parameters:
648            frequency:    the frequency to remove
649            unit:         the frequency unit (default "GHz")
650            width:        the number of 'lag' channels to extend the frequency
651                          by, i.e. the interval
652                          [bandwidth/frequency-width, bandwidth/frequency+width]
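652        Example (an illustrative sketch; the frequency and width are arbitrary):
            # remove a 0.01 GHz ripple, widening the cut by 4 lag channels
            cleaned = scan.lag_flag(0.01, unit="GHz", width=4, insitu=False)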
653        """
654        if insitu is None: insitu = rcParams['insitu']
655        self._math._setinsitu(insitu)
656        varlist = vars()
657        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
658        if not base.has_key(unit):
659            raise ValueError("%s is not a valid unit." % unit)
660        try:
661            s = scantable(self._math._lag_flag(self, frequency*base[unit], width))
662        except RuntimeError, msg:
663            if rcParams['verbose']:
664                print msg
665                return
666            else: raise
667        s._add_history("lag_flag", varlist)
668        print_log()
669        if insitu:
670            self._assign(s)
671        else:
672            return s
673
674
675    def create_mask(self, *args, **kwargs):
676        """
677        Compute and return a mask based on [min, max] windows.
678        The specified windows are to be INCLUDED, when the mask is
679        applied.
680        Parameters:
681            [min, max], [min2, max2], ...
682                Pairs of start/end points (inclusive) specifying the regions
683                to be masked
684            invert:     optional argument. If specified as True,
685                        return an inverted mask, i.e. the regions
686                        specified are EXCLUDED
687            row:        create the mask using the specified row for
688                        unit conversions, default is row=0
689                        only necessary if frequency varies over rows.
690        Example:
691            scan.set_unit('channel')
692            a)
693            msk = scan.create_mask([400, 500], [800, 900])
694            # masks everything outside 400 and 500
695            # and 800 and 900 in the unit 'channel'
696
697            b)
698            msk = scan.create_mask([400, 500], [800, 900], invert=True)
699            # masks the regions between 400 and 500
700            # and 800 and 900 in the unit 'channel'
701            c)
702            # mask only channel 400
703            msk = scan.create_mask([400, 400])
704        """
705        row = 0
706        if kwargs.has_key("row"):
707            row = kwargs.get("row")
708        data = self._getabcissa(row)
709        u = self._getcoordinfo()[0]
710        if rcParams['verbose']:
711            if u == "": u = "channel"
712            msg = "The current mask window unit is %s" % u
713            i = self._check_ifs()
714            if not i:
715                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
716            asaplog.push(msg)
717        n = self.nchan()
718        msk = NUM.zeros(n)
719        # test if args is a 'list' or a 'normal' *args - UGLY!!!
720
721        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
722             and args or args[0]
723        for window in ws:
724            if (len(window) != 2 or window[0] > window[1] ):
725                raise TypeError("A window needs to be defined as [min, max]")
726            for i in range(n):
727                if data[i] >= window[0] and data[i] <= window[1]:
728                    msk[i] = 1
729        if kwargs.has_key('invert'):
730            if kwargs.get('invert'):
731                msk = NUM.logical_not(msk)
732        print_log()
733        return msk
734
735    def get_restfreqs(self):
736        """
737        Get the restfrequency(s) stored in this scantable.
738        The return value(s) are always of unit 'Hz'
739        Parameters:
740            none
741        Returns:
742            a list of doubles
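742        Example (minimal sketch):
            rfs = scan.get_restfreqs()    # values are returned in Hz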
743        """
744        return list(self._getrestfreqs())
745
746
747    def set_restfreqs(self, freqs=None, unit='Hz'):
748        """
749        Set or replace the restfrequency specified.
750        If the 'freqs' argument holds a scalar,
751        then that rest frequency will be applied to all the selected
752        data.  If the 'freqs' argument holds
753        a vector, then it MUST be of equal or smaller length than
754        the number of IFs (and the available restfrequencies will be
755        replaced by this vector).  In this case, *all* data have
756        the restfrequency set per IF according
757        to the corresponding value you give in the 'freqs' vector.
758        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
759        IF 1 gets restfreq 2e9.
760        You can also specify the frequencies via a linecatalog.
761
762        Parameters:
763            freqs:   list of rest frequency values or string identifiers
764            unit:    unit for rest frequency (default 'Hz')
765
766        Example:
767            # set the given restfrequency for the whole table
768            scan.set_restfreqs(freqs=1.4e9)
769            # If the number of IFs in the data is >= 2 then IF0 gets the first
770            # value, IF1 the second, ...
771            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
772            #set the given restfrequency for the whole table (by name)
773            scan.set_restfreqs(freqs="OH1667")
774
775        Note:
776            To do more sophisticated rest frequency setting, e.g. on a
777            source and IF basis, use scantable.set_selection() before using
778            this function.
779            # provided your scantable is called scan
780            selection = selector()
781            selection.set_name("ORION*")
782            selection.set_ifs([1])
783            scan.set_selection(selection)
784            scan.set_restfreqs(freqs=86.6e9)
785
786        """
787        varlist = vars()
788        from asap import linecatalog
789        # simple  value
790        if isinstance(freqs, int) or isinstance(freqs, float):
791            self._setrestfreqs(freqs, "",unit)
792        # list of values
793        elif isinstance(freqs, list) or isinstance(freqs, tuple):
794            # list values are scalars
795            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
796                sel = selector()
797                savesel = self._getselection()
798                for i in xrange(len(freqs)):
799                    sel.set_ifs([i])
800                    self._setselection(sel)
801                    self._setrestfreqs(freqs[i], "",unit)
802                self._setselection(savesel)
803            # list values are dicts with 'value' and 'name' keys
804            elif isinstance(freqs[-1], dict):
805                sel = selector()
806                savesel = self._getselection()
807                for i in xrange(len(freqs)):
808                    sel.set_ifs([i])
809                    self._setselection(sel)
810                    self._setrestfreqs(freqs[i]["value"],
811                                       freqs[i]["name"], "MHz")
812                self._setselection(savesel)
813        # freqs are to be taken from a linecatalog
814        elif isinstance(freqs, linecatalog):
815            sel = selector()
816            savesel = self._getselection()
817            for i in xrange(freqs.nrow()):
818                sel.set_ifs([i])
819                self._setselection(sel)
820                self._setrestfreqs(freqs.get_frequency(i),
821                                   freqs.get_name(i), "MHz")
822                # ensure that we are not iterating past nIF
823                if i == self.nif()-1: break
824            self._setselection(savesel)
825        else:
826            return
827        self._add_history("set_restfreqs", varlist)
828
829
830
831    def history(self):
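        """
        Display the record of operations applied to this scantable
        (dates, function names and parameters) through a pager.
        """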
832        hist = list(self._gethistory())
833        out = "-"*80
834        for h in hist:
835            if h.startswith("---"):
836                out += "\n"+h
837            else:
838                items = h.split("##")
839                date = items[0]
840                func = items[1]
841                items = items[2:]
842                out += "\n"+date+"\n"
843                out += "Function: %s\n  Parameters:" % (func)
844                for i in items:
845                    s = i.split("=")
846                    out += "\n   %s = %s" % (s[0], s[1])
847                out += "\n"+"-"*80
848        try:
849            from IPython.genutils import page as pager
850        except ImportError:
851            from pydoc import pager
852        pager(out)
853        return
854
855    #
856    # Maths business
857    #
858
859    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
860        """
861        Return the (time) weighted average of a scan.
862        Note:
863            in channels only - align if necessary
864        Parameters:
865            mask:     an optional mask (only used for 'var' and 'tsys'
866                      weighting)
867            scanav:   True averages each scan separately
868                      False (default) averages all scans together,
869            weight:   Weighting scheme.
870                      'none'     (mean no weight)
871                      'var'      (1/var(spec) weighted)
872                      'tsys'     (1/Tsys**2 weighted)
873                      'tint'     (integration time weighted)
874                      'tintsys'  (Tint/Tsys**2)
875                      'median'   (median averaging)
876                      The default is 'tint'
877            align:    align the spectra in velocity before averaging. It takes
878                      the time of the first spectrum as reference time.
879        Example:
880            # time average the scantable without using a mask
881            newscan = scan.average_time()
882        """
883        varlist = vars()
884        if weight is None: weight = 'TINT'
885        if mask is None: mask = ()
886        if scanav: scanav = "SCAN"
887        else: scanav = "NONE"
888        scan = (self, )
889        try:
890            if align:
891                scan = (self.freq_align(insitu=False), )
892            s = None
893            if weight.upper() == 'MEDIAN':
894                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
895                                                         scanav))
896            else:
897                s = scantable(self._math._average(scan, mask, weight.upper(),
898                              scanav))
899        except RuntimeError, msg:
900            if rcParams['verbose']:
901                print msg
902                return
903            else: raise
904        s._add_history("average_time", varlist)
905        print_log()
906        return s
907
908    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
909        """
910        Return a scan where all spectra are converted to either
911        Jansky or Kelvin depending upon the flux units of the scan table.
912        By default the function tries to look the values up internally.
913        If it can't find them (or if you want to over-ride), you must
914        specify EITHER jyperk OR eta (and D which it will try to look up
915        also if you don't set it). jyperk takes precedence if you set both.
916        Parameters:
917            jyperk:      the Jy / K conversion factor
918            eta:         the aperture efficiency
919            d:           the geometric diameter (metres)
920            insitu:      if False a new scantable is returned.
921                         Otherwise, the scaling is done in-situ
922                         The default is taken from .asaprc (False)
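925        Example (an illustrative sketch; efficiency and diameter are arbitrary):
            jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)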
926        """
927        if insitu is None: insitu = rcParams['insitu']
928        self._math._setinsitu(insitu)
929        varlist = vars()
930        if jyperk is None: jyperk = -1.0
931        if d is None: d = -1.0
932        if eta is None: eta = -1.0
933        s = scantable(self._math._convertflux(self, d, eta, jyperk))
934        s._add_history("convert_flux", varlist)
935        print_log()
936        if insitu: self._assign(s)
937        else: return s
938
939    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
940        """
941        Return a scan after applying a gain-elevation correction.
942        The correction can be made via either a polynomial or a
943        table-based interpolation (and extrapolation if necessary).
944        You specify polynomial coefficients, an ascii table or neither.
945        If you specify neither, then a polynomial correction will be made
946        with built in coefficients known for certain telescopes (an error
947        will occur if the instrument is not known).
948        The data and Tsys are *divided* by the scaling factors.
949        Parameters:
950            poly:        Polynomial coefficients (default None) to compute a
951                         gain-elevation correction as a function of
952                         elevation (in degrees).
953            filename:    The name of an ascii file holding correction factors.
954                         The first row of the ascii file must give the column
955                         names and these MUST include columns
956                         "ELEVATION" (degrees) and "FACTOR" (multiply data
957                         by this) somewhere.
958                         The second row must give the data type of the
959                         column. Use 'R' for Real and 'I' for Integer.
960                         An example file would be
961                         (actual factors are arbitrary) :
962
963                         TIME ELEVATION FACTOR
964                         R R R
965                         0.1 0 0.8
966                         0.2 20 0.85
967                         0.3 40 0.9
968                         0.4 60 0.85
969                         0.5 80 0.8
970                         0.6 90 0.75
971            method:      Interpolation method when correcting from a table.
972                         Values are  "nearest", "linear" (default), "cubic"
973                         and "spline"
974            insitu:      if False a new scantable is returned.
975                         Otherwise, the scaling is done in-situ
976                         The default is taken from .asaprc (False)
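976        Example (an illustrative sketch; the polynomial coefficients are arbitrary):
            corrected = scan.gain_el(poly=[0.8, 0.005, -0.00004], insitu=False)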
977        """
978
979        if insitu is None: insitu = rcParams['insitu']
980        self._math._setinsitu(insitu)
981        varlist = vars()
982        if poly is None:
983            poly = ()
984        from os.path import expandvars
985        filename = expandvars(filename)
986        s = scantable(self._math._gainel(self, poly, filename, method))
987        s._add_history("gain_el", varlist)
988        print_log()
989        if insitu: self._assign(s)
990        else: return s
991
992    def freq_align(self, reftime=None, method='cubic', insitu=None):
993        """
994        Return a scan where all rows have been aligned in frequency/velocity.
995        The alignment frequency frame (e.g. LSRK) is that set by function
996        set_freqframe.
997        Parameters:
998            reftime:     reference time to align at. By default, the time of
999                         the first row of data is used.
1000            method:      Interpolation method for regridding the spectra.
1001                         Choose from "nearest", "linear", "cubic" (default)
1002                         and "spline"
1003            insitu:      if False a new scantable is returned.
1004                         Otherwise, the scaling is done in-situ
1005                         The default is taken from .asaprc (False)
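1005        Example (minimal sketch):
            aligned = scan.freq_align(insitu=False)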
1006        """
1007        if insitu is None: insitu = rcParams["insitu"]
1008        self._math._setinsitu(insitu)
1009        varlist = vars()
1010        if reftime is None: reftime = ""
1011        s = scantable(self._math._freq_align(self, reftime, method))
1012        s._add_history("freq_align", varlist)
1013        print_log()
1014        if insitu: self._assign(s)
1015        else: return s
1016
1017    def opacity(self, tau, insitu=None):
1018        """
1019        Apply an opacity correction. The data
1020        and Tsys are multiplied by the correction factor.
1021        Parameters:
1022            tau:         Opacity from which the correction factor is
1023                         exp(tau*ZD)
1024                         where ZD is the zenith-distance
1025            insitu:      if False a new scantable is returned.
1026                         Otherwise, the scaling is done in-situ
1027                         The default is taken from .asaprc (False)
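1027        Example (an illustrative sketch; the opacity value is arbitrary):
            corrected = scan.opacity(0.09, insitu=False)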
1028        """
1029        if insitu is None: insitu = rcParams['insitu']
1030        self._math._setinsitu(insitu)
1031        varlist = vars()
1032        s = scantable(self._math._opacity(self, tau))
1033        s._add_history("opacity", varlist)
1034        print_log()
1035        if insitu: self._assign(s)
1036        else: return s
1037
1038    def bin(self, width=5, insitu=None):
1039        """
1040        Return a scan where all spectra have been binned up.
1041            width:       The bin width (default=5) in pixels
1042            insitu:      if False a new scantable is returned.
1043                         Otherwise, the scaling is done in-situ
1044                         The default is taken from .asaprc (False)
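1044        Example (minimal sketch):
            binned = scan.bin(width=10, insitu=False)    # 10-channel bins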
1045        """
1046        if insitu is None: insitu = rcParams['insitu']
1047        self._math._setinsitu(insitu)
1048        varlist = vars()
1049        s = scantable(self._math._bin(self, width))
1050        s._add_history("bin", varlist)
1051        print_log()
1052        if insitu: self._assign(s)
1053        else: return s
1054
1055
1056    def resample(self, width=5, method='cubic', insitu=None):
1057        """
1058        Return a scan where all spectra have been resampled onto a new grid.
1059            width:       The bin width (default=5) in pixels
1060            method:      Interpolation method used for the resampling.
1061                         Values are  "nearest", "linear", "cubic" (default)
1062                         and "spline"
1063            insitu:      if False a new scantable is returned.
1064                         Otherwise, the scaling is done in-situ
1065                         The default is taken from .asaprc (False)
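1065        Example (minimal sketch):
            regridded = scan.resample(width=4, method='linear', insitu=False)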
1066        """
1067        if insitu is None: insitu = rcParams['insitu']
1068        self._math._setinsitu(insitu)
1069        varlist = vars()
1070        s = scantable(self._math._resample(self, method, width))
1071        s._add_history("resample", varlist)
1072        print_log()
1073        if insitu: self._assign(s)
1074        else: return s
1075
1076
1077    def average_pol(self, mask=None, weight='none'):
1078        """
1079        Average the Polarisations together.
1080        Parameters:
1081            mask:        An optional mask defining the region, where the
1082                         averaging will be applied. The output will have all
1083                         specified points masked.
1084            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1085                         weighted), or 'tsys' (1/Tsys**2 weighted)
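1085        Example (minimal sketch):
            avpol = scan.average_pol(weight='tsys')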
1086        """
1087        varlist = vars()
1088        if mask is None:
1089            mask = ()
1090        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1091        s._add_history("average_pol", varlist)
1092        print_log()
1093        return s
1094
1095    def average_beam(self, mask=None, weight='none'):
1096        """
1097        Average the Beams together.
1098        Parameters:
1099            mask:        An optional mask defining the region, where the
1100                         averaging will be applied. The output will have all
1101                         specified points masked.
1102            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1103                         weighted), or 'tsys' (1/Tsys**2 weighted)
1104        """
1105        varlist = vars()
1106        if mask is None:
1107            mask = ()
1108        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1109        s._add_history("average_beam", varlist)
1110        print_log()
1111        return s
1112
1113    def convert_pol(self, poltype=None):
1114        """
1115        Convert the data to a different polarisation type.
1116        Parameters:
1117            poltype:    The new polarisation type. Valid types are:
1118                        "linear", "stokes" and "circular"
1119        """
1120        varlist = vars()
1121        try:
1122            s = scantable(self._math._convertpol(self, poltype))
1123        except RuntimeError, msg:
1124            if rcParams['verbose']:
1125                print msg
1126                return
1127            else:
1128                raise
1129        s._add_history("convert_pol", varlist)
1130        print_log()
1131        return s
1132
1133    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1134        """
1135        Smooth the spectrum by the specified kernel (conserving flux).
1136        Parameters:
1137            scan:       The input scan
1138            kernel:     The type of smoothing kernel. Select from
1139                        'hanning' (default), 'gaussian' and 'boxcar'.
1140                        The first three characters are sufficient.
1141            width:      The width of the kernel in pixels. For hanning this is
1142                        ignored, otherwise it defaults to 5 pixels.
1143                        For 'gaussian' it is the Full Width Half
1144                        Maximum. For 'boxcar' it is the full width.
1145            insitu:     if False a new scantable is returned.
1146                        Otherwise, the scaling is done in-situ
1147                        The default is taken from .asaprc (False)
1148        Example:
1149             none
1150        """
1151        if insitu is None: insitu = rcParams['insitu']
1152        self._math._setinsitu(insitu)
1153        varlist = vars()
1154        s = scantable(self._math._smooth(self, kernel.lower(), width))
1155        s._add_history("smooth", varlist)
1156        print_log()
1157        if insitu: self._assign(s)
1158        else: return s
1159
1160
1161    def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1162        """
1163        Return a scan which has been baselined (all rows) by a polynomial.
1164        Parameters:
1165            scan:       a scantable
1166            mask:       an optional mask
1167            order:      the order of the polynomial (default is 0)
1168            plot:       plot the fit and the residual. In this mode each
1169                        individual fit has to be approved by typing 'y'
1170                        or 'n'
1171            insitu:     if False a new scantable is returned.
1172                        Otherwise, the scaling is done in-situ
1173                        The default is taken from .asaprc (False)
1174        Example:
1175            # return a scan baselined by a third order polynomial,
1176            # not using a mask
1177            bscan = scan.poly_baseline(order=3)
1178        """
1179        if insitu is None: insitu = rcParams['insitu']
1180        varlist = vars()
1181        if mask is None:
1182            mask = list(NUM.ones(self.nchan(-1)))
1183        from asap.asapfitter import fitter
1184        f = fitter()
1185        f.set_scan(self, mask)
1186        f.set_function(poly=order)
1187        s = f.auto_fit(insitu, plot=plot)
1188        s._add_history("poly_baseline", varlist)
1189        print_log()
1190        if insitu: self._assign(s)
1191        else: return s
1192
1193    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1194                           threshold=3, plot=False, insitu=None):
1195        """
1196        Return a scan which has been baselined (all rows) by a polynomial.
1197        Spectral lines are detected first using linefinder and masked out
1198        to avoid them affecting the baseline solution.
1199
1200        Parameters:
1201            mask:       an optional mask retrieved from scantable
1202            edge:       an optional number of channels to drop at
1203                        the edge of the spectrum. If only one value is
1204                        specified, the same number will be dropped from
1205                        both sides of the spectrum. Default is to keep
1206                        all channels. Nested tuples represent individual
1207                        edge selection for different IFs (the number of spectral
1208                        channels can differ)
1209            order:      the order of the polynomial (default is 0)
1210            threshold:  the threshold used by line finder. It is better to
1211                        keep it large as only strong lines affect the
1212                        baseline solution.
1213            plot:       plot the fit and the residual. In this mode each
1214                        individual fit has to be approved by typing 'y'
1215                        or 'n'
1216            insitu:     if False a new scantable is returned.
1217                        Otherwise, the scaling is done in-situ
1218                        The default is taken from .asaprc (False)
1219
1220        Example:
1221            scan2=scan.auto_poly_baseline(order=7)
1222        """
1223        if insitu is None: insitu = rcParams['insitu']
1224        varlist = vars()
1225        from asap.asapfitter import fitter
1226        from asap.asaplinefind import linefinder
1227        from asap import _is_sequence_or_number as _is_valid
1228
1229        # check whether edge is set up for each IF individually
1230        individualedge = False;
1231        if len(edge) > 1:
1232            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1233                individualedge = True;
1234
1235        if not _is_valid(edge, int) and not individualedge:
1236            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1237                              "pair of integers specified as a tuple. Nested tuples " \
1238                              "are allowed to make individual selection for different IFs."
1239
1240        curedge = (0, 0)
1241        if individualedge:
1242            for edgepar in edge:
1243                if not _is_valid(edgepar, int):
1244                    raise ValueError, "Each element of the 'edge' tuple has " \
1245                                      "to be a pair of integers or an integer."
1246        else:
1247            curedge = edge;
1248
1249        # setup fitter
1250        f = fitter()
1251        f.set_function(poly=order)
1252
1253        # setup line finder
1254        fl = linefinder()
1255        fl.set_options(threshold=threshold)
1256
1257        if not insitu:
1258            workscan = self.copy()
1259        else:
1260            workscan = self
1261
1262        fl.set_scan(workscan)
1263
1264        rows = range(workscan.nrow())
1265        asaplog.push("Processing:")
1266        for r in rows:
1267            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1268                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1269                 workscan.getpol(r), workscan.getcycle(r))
1270            asaplog.push(msg, False)
1271
1272            # figure out edge parameter
1273            if individualedge:
1274                if len(edge) <= workscan.getif(r):
1275                    raise RuntimeError, "Number of edge elements appears to " \
1276                                        "be less than the number of IFs"
1277                curedge = edge[workscan.getif(r)]
1278
1279            # setup line finder
1280            fl.find_lines(r, mask, curedge)
1281            f.set_scan(workscan, fl.get_mask())
1282            f.x = workscan._getabcissa(r)
1283            f.y = workscan._getspectrum(r)
1284            f.data = None
1285            f.fit()
1286            x = f.get_parameters()
1287            if plot:
1288                f.plot(residual=True)
1289                x = raw_input("Accept fit ( [y]/n ): ")
1290                if x.upper() == 'N':
1291                    continue
1292            workscan._setspectrum(f.fitter.getresidual(), r)
1293        if plot:
1294            f._p.unmap()
1295            f._p = None
1296        workscan._add_history("auto_poly_baseline", varlist)
1297        if insitu:
1298            self._assign(workscan)
1299        else:
1300            return workscan
1301
1302    def rotate_linpolphase(self, angle):
1303        """
1304        Rotate the phase of the complex polarization O=Q+iU correlation.
1305        This is always done in situ in the raw data.  So if you call this
1306        function more than once then each call rotates the phase further.
1307        Parameters:
1308            angle:   The angle (degrees) to rotate (add) by.
1309        Examples:
1310            scan.rotate_linpolphase(2.3)
1311        """
1312        varlist = vars()
1313        self._math._rotate_linpolphase(self, angle)
1314        self._add_history("rotate_linpolphase", varlist)
1315        print_log()
1316        return
1317
1318
1319    def rotate_xyphase(self, angle):
1320        """
1321        Rotate the phase of the XY correlation.  This is always done in situ
1322        in the data.  So if you call this function more than once
1323        then each call rotates the phase further.
1324        Parameters:
1325            angle:   The angle (degrees) to rotate (add) by.
1326        Examples:
1327            scan.rotate_xyphase(2.3)
1328        """
1329        varlist = vars()
1330        self._math._rotate_xyphase(self, angle)
1331        self._add_history("rotate_xyphase", varlist)
1332        print_log()
1333        return
1334
1335    def swap_linears(self):
1336        """
1337        Swap the linear polarisations XX and YY
1338        """
1339        varlist = vars()
1340        self._math._swap_linears(self)
1341        self._add_history("swap_linears", varlist)
1342        print_log()
1343        return
1344
1345    def invert_phase(self):
1346        """
1347        Invert the phase of the complex polarisation
1348        """
1349        varlist = vars()
1350        self._math._invert_phase(self)
1351        self._add_history("invert_phase", varlist)
1352        print_log()
1353        return
1354
1355    def add(self, offset, insitu=None):
1356        """
1357        Return a scan where all spectra have the offset added
1358        Parameters:
1359            offset:      the offset
1360            insitu:      if False a new scantable is returned.
1361                         Otherwise, the scaling is done in-situ
1362                         The default is taken from .asaprc (False)
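        Example (an illustrative sketch; 'scan' stands for any existing
        scantable and the offset value is arbitrary):
            # return a new scantable with 0.05 added to every channel
            offscan = scan.add(0.05, insitu=False)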
1363        """
1364        if insitu is None: insitu = rcParams['insitu']
1365        self._math._setinsitu(insitu)
1366        varlist = vars()
1367        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1368        s._add_history("add", varlist)
1369        print_log()
1370        if insitu:
1371            self._assign(s)
1372        else:
1373            return s
1374
1375    def scale(self, factor, tsys=True, insitu=None):
1376        """
1377        Return a scan where all spectra are scaled by the given 'factor'
1378        Parameters:
1379            factor:      the scaling factor
1380            insitu:      if False a new scantable is returned.
1381                         Otherwise, the scaling is done in-situ
1382                         The default is taken from .asaprc (False)
1383            tsys:        if True (default) then apply the operation to Tsys
1384                         as well as the data
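        Example (an illustrative sketch; 'scan' stands for any existing
        scantable and the factor is arbitrary):
            # return a new scantable with the data (and Tsys) multiplied by 1.25
            scaled = scan.scale(1.25, tsys=True, insitu=False)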
1385        """
1386        if insitu is None: insitu = rcParams['insitu']
1387        self._math._setinsitu(insitu)
1388        varlist = vars()
1389        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1390        s._add_history("scale", varlist)
1391        print_log()
1392        if insitu:
1393            self._assign(s)
1394        else:
1395            return s
1396
1397    def auto_quotient(self, mode='time', preserve=True):
1398        """
1399        This function builds quotients automatically.
1400        It assumes the observation contains the same number of
1401        "ons" and "offs".
1402        It will support "closest off in time" in the future.
1403        Parameters:
1404            mode:           the on/off detection mode; only 'time'
1405                            (the default) is currently supported,
1406                            and any other value raises a
1407                            ValueError
1408            preserve:       you can preserve (default) the continuum or
1409                            remove it.  The equations used are
1410                            preserve: Output = Toff * (on/off) - Toff
1411                            remove:   Output = Toff * (on/off) - Ton
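        Example (an illustrative sketch; 'scan' is assumed to contain
        matching "on" and "off" scans):
            q = scan.auto_quotient(mode='time', preserve=True)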
1412        """
1413        modes = ["time"]
1414        if mode not in modes:
1415            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1416            raise ValueError(msg)
1417        varlist = vars()
1418        s = scantable(self._math._auto_quotient(self, mode, preserve))
1419        s._add_history("auto_quotient", varlist)
1420        print_log()
1421        return s
1422
1423    def mx_quotient(self, mask = None, weight='median', preserve=True):
1424        """
1425        Form a quotient using "off" beams when observing in "MX" mode.
1426        Parameters:
1427            mask:           an optional mask to be used when weight == 'stddev'
1428            weight:         How to average the off beams.  Default is 'median'.
1429            preserve:       you can preserve (default) the continuum or
1430                            remove it.  The equations used are
1431                            preserve: Output = Toff * (on/off) - Toff
1432                            remove:   Output = Toff * (on/off) - Ton
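        Example (an illustrative sketch; 'scan' is assumed to be an
        MX-mode observation):
            q = scan.mx_quotient(weight='median', preserve=True)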
1433        """
1434        if mask is None: mask = ()
1435        varlist = vars()
1436        on = scantable(self._math._mx_extract(self, 'on'))
1437        preoff = scantable(self._math._mx_extract(self, 'off'))
1438        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1439        from asapmath  import quotient
1440        q = quotient(on, off, preserve)
1441        q._add_history("mx_quotient", varlist)
1442        print_log()
1443        return q
1444
1445    def freq_switch(self, insitu=None):
1446        """
1447        Apply frequency switching to the data.
1448        Parameters:
1449            insitu:      if False a new scantable is returned.
1450                         Otherwise, the switching is done in-situ
1451                         The default is taken from .asaprc (False)
1452        Example:
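            # an illustrative sketch; 'scan' stands for any existing
            # scantable
            fs = scan.freq_switch(insitu=False)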
1454        """
1455        if insitu is None: insitu = rcParams['insitu']
1456        self._math._setinsitu(insitu)
1457        varlist = vars()
1458        s = scantable(self._math._freqswitch(self))
1459        s._add_history("freq_switch", varlist)
1460        print_log()
1461        if insitu: self._assign(s)
1462        else: return s
1463
1464    def recalc_azel(self):
1465        """
1466        Recalculate the azimuth and elevation for each position.
1467        Parameters:
1468            none
1469        Example:
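            # illustrative; recompute az/el in place for an existing
            # scantable 'scan'
            scan.recalc_azel()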
1470        """
1471        varlist = vars()
1472        self._recalcazel()
1473        self._add_history("recalc_azel", varlist)
1474        print_log()
1475        return
1476
1477    def __add__(self, other):
1478        varlist = vars()
1479        s = None
1480        if isinstance(other, scantable):
1481            print "scantable + scantable NYI"
1482            return
1483        elif isinstance(other, float):
1484            s = scantable(self._math._unaryop(self, other, "ADD", False))
1485        else:
1486            raise TypeError("Other input is not a scantable or float value")
1487        s._add_history("operator +", varlist)
1488        print_log()
1489        return s
1490
1491    def __sub__(self, other):
1492        """
1493        implicit on all axes and on Tsys
1494        """
1495        varlist = vars()
1496        s = None
1497        if isinstance(other, scantable):
1498            print "scantable - scantable NYI"
1499            return
1500        elif isinstance(other, float):
1501            s = scantable(self._math._unaryop(self, other, "SUB", False))
1502        else:
1503            raise TypeError("Other input is not a scantable or float value")
1504        s._add_history("operator -", varlist)
1505        print_log()
1506        return s
1507
1508    def __mul__(self, other):
1509        """
1510        implicit on all axes and on Tsys
1511        """
1512        varlist = vars()
1513        s = None
1514        if isinstance(other, scantable):
1515            print "scantable * scantable NYI"
1516            return
1517        elif isinstance(other, float):
1518            s = scantable(self._math._unaryop(self, other, "MUL", False))
1519        else:
1520            raise TypeError("Other input is not a scantable or float value")
1521        s._add_history("operator *", varlist)
1522        print_log()
1523        return s
1524
1525
1526    def __div__(self, other):
1527        """
1528        implicit on all axes and on Tsys
1529        """
1530        varlist = vars()
1531        s = None
1532        if isinstance(other, scantable):
1533            print "scantable / scantable NYI"
1534            return
1535        elif isinstance(other, float):
1536            if other == 0.0:
1537                raise ZeroDivisionError("Dividing by zero is not recommended")
1538            s = scantable(self._math._unaryop(self, other, "DIV", False))
1539        else:
1540            raise TypeError("Other input is not a scantable or float value")
1541        s._add_history("operator /", varlist)
1542        print_log()
1543        return s
1544
1545    def get_fit(self, row=0):
1546        """
1547        Print or return the stored fits for a row in the scantable
1548        Parameters:
1549            row:    the row to which the fit has been applied.
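        Example (an illustrative sketch; assumes a fit has previously been
        stored for row 0 of an existing scantable 'scan'):
            # prints the fit if rcParams['verbose'], otherwise returns a dict
            fit = scan.get_fit(row=0)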
1550        """
1551        if row >= self.nrow():
1552            return
1553        from asap.asapfit import asapfit
1554        fit = asapfit(self._getfit(row))
1555        if rcParams['verbose']:
1556            print fit
1557            return
1558        else:
1559            return fit.as_dict()
1560
1561    def _add_history(self, funcname, parameters):
1562        # create date
1563        sep = "##"
1564        from datetime import datetime
1565        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1566        hist = dstr+sep
1567        hist += funcname+sep
1568        if parameters.has_key('self'): del parameters['self']
1569        for k, v in parameters.iteritems():
1570            if type(v) is dict:
1571                for k2, v2 in v.iteritems():
1572                    hist += k2
1573                    hist += "="
1574                    if isinstance(v2, scantable):
1575                        hist += 'scantable'
1576                    elif k2 == 'mask':
1577                        if isinstance(v2, list) or isinstance(v2, tuple):
1578                            hist += str(self._zip_mask(v2))
1579                        else:
1580                            hist += str(v2)
1581                    else:
1582                        hist += str(v2)
1583            else:
1584                hist += k
1585                hist += "="
1586                if isinstance(v, scantable):
1587                    hist += 'scantable'
1588                elif k == 'mask':
1589                    if isinstance(v, list) or isinstance(v, tuple):
1590                        hist += str(self._zip_mask(v))
1591                    else:
1592                        hist += str(v)
1593                else:
1594                    hist += str(v)
1595            hist += sep
1596        hist = hist[:-2] # remove trailing '##'
1597        self._addhistory(hist)
1598
1599
1600    def _zip_mask(self, mask):
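        # Compress a 0/1 channel mask into a list of [start, stop) index
        # pairs, one pair per contiguous run of selected (non-zero) channels.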
1601        mask = list(mask)
1602        i = 0
1603        segments = []
1604        while mask[i:].count(1):
1605            i += mask[i:].index(1)
1606            if mask[i:].count(0):
1607                j = i + mask[i:].index(0)
1608            else:
1609                j = len(mask)
1610            segments.append([i, j])
1611            i = j
1612        return segments
1613
1614    def _get_ordinate_label(self):
1615        fu = "("+self.get_fluxunit()+")"
1616        import re
1617        lbl = "Intensity"
1618        if re.match(".K.", fu):
1619            lbl = "Brightness Temperature "+ fu
1620        elif re.match(".Jy.", fu):
1621            lbl = "Flux density "+ fu
1622        return lbl
1623
1624    def _check_ifs(self):
1625        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1626        nchans = filter(lambda t: t > 0, nchans)
1627        return len(filter(lambda t: t != nchans[0], nchans)) == 0  # all IFs must share nchan
1628
1629    def _fill(self, names, unit, average):
1630        import os
1631        from asap._asap import stfiller
1632        first = True
1633        fullnames = []
1634        for name in names:
1635            name = os.path.expandvars(name)
1636            name = os.path.expanduser(name)
1637            if not os.path.exists(name):
1638                msg = "File '%s' does not exist" % (name)
1639                if rcParams['verbose']:
1640                    asaplog.push(msg)
1641                    print asaplog.pop().strip()
1642                    return
1643                raise IOError(msg)
1644            fullnames.append(name)
1645        if average:
1646            asaplog.push('Auto averaging integrations')
1647        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1648        for name in fullnames:
1649            tbl = Scantable(stype)
1650            r = stfiller(tbl)
1651            msg = "Importing %s..." % (name)
1652            asaplog.push(msg, False)
1653            print_log()
1654            r._open(name, -1, -1)
1655            r._read()
1656            #tbl = r._getdata()
1657            if average:
1658                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1659                #tbl = tbl2
1660            if not first:
1661                tbl = self._math._merge([self, tbl])
1662                #tbl = tbl2
1663            Scantable.__init__(self, tbl)
1664            r._close()
1665            del r, tbl
1666            first = False
1667        if unit is not None:
1668            self.set_fluxunit(unit)
1669        self.set_freqframe(rcParams['scantable.freqframe'])
1670        #self._add_history("scantable", varlist)
1671