source: trunk/python/scantable.py @ 1536

Last change on this file since 1536 was 1536, checked in by Malte Marquarding, 15 years ago

Fix for ticket #154: flagged data wasn't honoured in any fitting process, e.g. poly_baseline

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 70.0 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32        """
33        if average is None:
34            average = rcParams['scantable.autoaverage']
35        varlist = vars()
36        from asap._asap import stmath
37        self._math = stmath()
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40        else:
41            if isinstance(filename, str):# or \
42#                (isinstance(filename, list) or isinstance(filename, tuple)) \
43#                  and isinstance(filename[-1], str):
44                import os.path
45                filename = os.path.expandvars(filename)
46                filename = os.path.expanduser(filename)
47                if not os.path.exists(filename):
48                    s = "File '%s' not found." % (filename)
49                    if rcParams['verbose']:
50                        asaplog.push(s)
51                        print asaplog.pop().strip()
52                        return
53                    raise IOError(s)
54                if os.path.isdir(filename) \
55                    and not os.path.exists(filename+'/table.f1'):
56                    # crude check if asap table
57                    if os.path.exists(filename+'/table.info'):
58                        ondisk = rcParams['scantable.storage'] == 'disk'
59                        Scantable.__init__(self, filename, ondisk)
60                        if unit is not None:
61                            self.set_fluxunit(unit)
62                        self.set_freqframe(rcParams['scantable.freqframe'])
63                    else:
64                        msg = "The given file '%s' is not a valid " \
65                              "asap table." % (filename)
66                        if rcParams['verbose']:
67                            print msg
68                            return
69                        else:
70                            raise IOError(msg)
71                else:
72                    self._fill([filename], unit, average)
73            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
74                  and isinstance(filename[-1], str):
75                self._fill(filename, unit, average)
76        self._add_history("scantable", varlist)
77        print_log()
78
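    # Illustrative constructor sketch: 'mydata.rpf' is a hypothetical file name.
    # This would read the rpfits file with integrations averaged on read and
    # the brightness unit forced to Kelvin.
    #
    #     from asap import scantable
    #     scan = scantable('mydata.rpf', average=True, unit='K')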
79    def save(self, name=None, format=None, overwrite=False):
80        """
81        Store the scantable on disk. This can be an asap (aips++) Table,
82        SDFITS or MS2 format.
83        Parameters:
84            name:        the name of the output file. For format "ASCII"
85                         this is the root file name (data in 'name'.txt
86                         and header in 'name'_header.txt)
87            format:      an optional file format. Default is ASAP.
88                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
89                                       'SDFITS' (save as SDFITS file)
90                                       'ASCII' (saves as ascii text file)
91                                       'MS2' (saves as an aips++
92                                              MeasurementSet V2)
93                                       'FITS' (save as image FITS - not
94                                               readable by class)
95                                       'CLASS' (save as FITS readable by CLASS)
96            overwrite:   Whether the file should be overwritten if it exists.
97                         The default False is to return with warning
98                         without writing the output. USE WITH CARE.
99        Example:
100            scan.save('myscan.asap')
101            scan.save('myscan.sdfits', 'SDFITS')
102        """
103        from os import path
104        if format is None: format = rcParams['scantable.save']
105        suffix = '.'+format.lower()
106        if name is None or name == "":
107            name = 'scantable'+suffix
108            msg = "No filename given. Using default name %s..." % name
109            asaplog.push(msg)
110        name = path.expandvars(name)
111        if path.isfile(name) or path.isdir(name):
112            if not overwrite:
113                msg = "File %s exists." % name
114                if rcParams['verbose']:
115                    print msg
116                    return
117                else:
118                    raise IOError(msg)
119        format2 = format.upper()
120        if format2 == 'ASAP':
121            self._save(name)
122        else:
123            from asap._asap import stwriter as stw
124            writer = stw(format2)
125            writer.write(self, name)
126        print_log()
127        return
128
129    def copy(self):
130        """
131        Return a copy of this scantable.
132        Note:
133            This makes a full (deep) copy. scan2 = scan1 makes a reference.
134        Parameters:
135            none
136        Example:
137            copiedscan = scan.copy()
138        """
139        sd = scantable(Scantable._copy(self))
140        return sd
141
142    def drop_scan(self, scanid=None):
143        """
144        Return a new scantable where the specified scan number(s) has(have)
145        been dropped.
146        Parameters:
147            scanid:    a (list of) scan number(s)
148        """
149        from asap import _is_sequence_or_number as _is_valid
150        from asap import _to_list
151        from asap import unique
152        if not _is_valid(scanid):
153            if rcParams['verbose']:
154                print "Please specify a scanno to drop from the scantable"
155                return
156            else:
157                raise RuntimeError("No scan given")
158        try:
159            scanid = _to_list(scanid)
160            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
161            for sid in scanid: allscans.remove(sid)
162            if len(allscans) == 0:
163                raise ValueError("Can't remove all scans")
164        except ValueError:
165            if rcParams['verbose']:
166                print "Couldn't find any match."
167                return
168            else: raise
169        try:
170            bsel = self.get_selection()
171            sel = selector()
172            sel.set_scans(allscans)
173            self.set_selection(bsel+sel)
174            scopy = self._copy()
175            self.set_selection(bsel)
176            return scantable(scopy)
177        except RuntimeError:
178            if rcParams['verbose']:
179                print "Couldn't find any match."
180            else:
181                raise
182
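    # Illustrative sketch for drop_scan (scan numbers are arbitrary): return a
    # new scantable with scans 2 and 4 removed, leaving 'scan' itself unchanged.
    #
    #     cleaned = scan.drop_scan([2, 4])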
183
184    def get_scan(self, scanid=None):
185        """
186        Return a specific scan (by scanno) or collection of scans (by
187        source name) in a new scantable.
188        Note:
189            See scantable.drop_scan() for the inverse operation.
190        Parameters:
191            scanid:    a (list of) scanno or a source name, unix-style
192                       patterns are accepted for source name matching, e.g.
193                       '*_R' gets all 'ref' scans
194        Example:
195            # get all scans containing the source '323p459'
196            newscan = scan.get_scan('323p459')
197            # get all 'off' scans
198            refscans = scan.get_scan('*_R')
199            # get a subset of scans by scanno (as listed in scan.summary())
200            newscan = scan.get_scan([0, 2, 7, 10])
201        """
202        if scanid is None:
203            if rcParams['verbose']:
204                print "Please specify a scan no or name to " \
205                      "retrieve from the scantable"
206                return
207            else:
208                raise RuntimeError("No scan given")
209
210        try:
211            bsel = self.get_selection()
212            sel = selector()
213            if type(scanid) is str:
214                sel.set_name(scanid)
215                self.set_selection(bsel+sel)
216                scopy = self._copy()
217                self.set_selection(bsel)
218                return scantable(scopy)
219            elif type(scanid) is int:
220                sel.set_scans([scanid])
221                self.set_selection(bsel+sel)
222                scopy = self._copy()
223                self.set_selection(bsel)
224                return scantable(scopy)
225            elif type(scanid) is list:
226                sel.set_scans(scanid)
227                self.set_selection(sel)
228                scopy = self._copy()
229                self.set_selection(bsel)
230                return scantable(scopy)
231            else:
232                msg = "Illegal scanid type, use 'int' or 'list' of ints."
233                if rcParams['verbose']:
234                    print msg
235                else:
236                    raise TypeError(msg)
237        except RuntimeError:
238            if rcParams['verbose']: print "Couldn't find any match."
239            else: raise
240
241    def __str__(self):
242        return Scantable._summary(self, True)
243
244    def summary(self, filename=None):
245        """
246        Print a summary of the contents of this scantable.
247        Parameters:
248            filename:    the name of a file to write the output to
249                         Default - no file output
250            verbose:     print extra info such as the frequency table
251                         The default (False) is taken from .asaprc
252        """
253        info = Scantable._summary(self, True)
254        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
255        if filename is not None:
256            if filename == "":
257                filename = 'scantable_summary.txt'
258            from os.path import expandvars, isdir
259            filename = expandvars(filename)
260            if not isdir(filename):
261                data = open(filename, 'w')
262                data.write(info)
263                data.close()
264            else:
265                msg = "Illegal file name '%s'." % (filename)
266                if rcParams['verbose']:
267                    print msg
268                else:
269                    raise IOError(msg)
270        if rcParams['verbose']:
271            try:
272                from IPython.genutils import page as pager
273            except ImportError:
274                from pydoc import pager
275            pager(info)
276        else:
277            return info
278
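    # Illustrative sketch for summary: write the summary to a text file
    # (the file name is arbitrary) instead of paging it on screen.
    #
    #     info = scan.summary(filename='scan_summary.txt')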
279    def get_spectrum(self, rowno):
280        """Return the spectrum for the current row in the scantable as a list.
281        Parameters:
282             rowno:   the row number to retrieve the spectrum from       
283        """
284        return self._getspectrum(rowno)
285
286    def get_mask(self, rowno):
287        """Return the mask for the current row in the scantable as a list.
288        Parameters:
289             rowno:   the row number to retrieve the mask from       
290        """
291        return self._getmask(rowno)
292
293    def set_spectrum(self, spec, rowno):
294        """Set the spectrum for the given row in the scantable.
295        Parameters:
296             spec:   the spectrum
297             rowno:    the row number to set the spectrum for       
298        """
299        assert(len(spec) == self.nchan())
300        return self._setspectrum(spec, rowno)
301
302    def get_selection(self):
303        """
304        Get the selection object currently set on this scantable.
305        Parameters:
306            none
307        Example:
308            sel = scan.get_selection()
309            sel.set_ifs(0)              # select IF 0
310            scan.set_selection(sel)     # apply modified selection
311        """
312        return selector(self._getselection())
313
314    def set_selection(self, selection=selector()):
315        """
316        Select a subset of the data. All following operations on this scantable
317        are only applied to this selection.
318        Parameters:
319            selection:    a selector object (default unset the selection)
320        Examples:
321            sel = selector()         # create a selection object
322            sel.set_scans([0, 3])    # select SCANNO 0 and 3
323            scan.set_selection(sel)  # set the selection
324            scan.summary()           # will only print summary of scanno 0 and 3
325            scan.set_selection()     # unset the selection
326        """
327        self._setselection(selection)
328
329    def stats(self, stat='stddev', mask=None):
330        """
331        Determine the specified statistic of the current beam/if/pol
332        Takes a 'mask' as an optional parameter to specify which
333        channels should be excluded.
334        Parameters:
335            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
336                     'var', 'stddev', 'avdev', 'rms', 'median'
337            mask:    an optional mask specifying where the statistic
338                     should be determined.
339        Example:
340            scan.set_unit('channel')
341            msk = scan.create_mask([100, 200], [500, 600])
342            scan.stats(stat='mean', mask=msk)
343        """
344        if mask is None:
345            mask = []
346        axes = ['Beam', 'IF', 'Pol', 'Time']
347        if not self._check_ifs():
348            raise ValueError("Cannot apply mask as the IFs have different "
349                             "number of channels. Please use set_selection() "
350                             "to select individual IFs")
351
352        statvals = self._math._stats(self, mask, stat)
353        out = ''
354        axes = []
355        for i in range(self.nrow()):
356            axis = []
357            axis.append(self.getscan(i))
358            axis.append(self.getbeam(i))
359            axis.append(self.getif(i))
360            axis.append(self.getpol(i))
361            axis.append(self.getcycle(i))
362            axes.append(axis)
363            tm = self._gettime(i)
364            src = self._getsourcename(i)
365            out += 'Scan[%d] (%s) ' % (axis[0], src)
366            out += 'Time[%s]:\n' % (tm)
367            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
368            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
369            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
370            out += '= %3.3f\n' % (statvals[i])
371            out +=  "--------------------------------------------------\n"
372
373        if rcParams['verbose']:
374            print "--------------------------------------------------"
375            print " ", stat
376            print "--------------------------------------------------"
377            print out
378        #else:
379            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
380            #           'axes' : axes,
381            #           'data': statvals}
382        return statvals
383
384    def stddev(self, mask=None):
385        """
386        Determine the standard deviation of the current beam/if/pol
387        Takes a 'mask' as an optional parameter to specify which
388        channels should be excluded.
389        Parameters:
390            mask:    an optional mask specifying where the standard
391                     deviation should be determined.
392
393        Example:
394            scan.set_unit('channel')
395            msk = scan.create_mask([100, 200], [500, 600])
396            scan.stddev(mask=msk)
397        """
398        return self.stats(stat='stddev', mask=mask)
399
400
401    def get_column_names(self):
402        """
403        Return a list of column names, which can be used for selection.
404        """
405        return list(Scantable.get_column_names(self))
406
407    def get_tsys(self):
408        """
409        Return the System temperatures.
410        Returns:
411            a list of Tsys values for the current selection
412        """
413
414        return self._row_callback(self._gettsys, "Tsys")
415
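    # Illustrative sketch for get_tsys, assuming 'scan' is an existing scantable:
    # the Tsys values come back as a plain list, one entry per row of the
    # current selection.
    #
    #     tsys = scan.get_tsys()
    #     print "rows:", len(tsys), " first Tsys:", tsys[0]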
416    def _row_callback(self, callback, label):
417        axes = []
418        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
419        out = ""
420        outvec = []
421        for i in range(self.nrow()):
422            axis = []
423            axis.append(self.getscan(i))
424            axis.append(self.getbeam(i))
425            axis.append(self.getif(i))
426            axis.append(self.getpol(i))
427            axis.append(self.getcycle(i))
428            axes.append(axis)
429            tm = self._gettime(i)
430            src = self._getsourcename(i)
431            out += 'Scan[%d] (%s) ' % (axis[0], src)
432            out += 'Time[%s]:\n' % (tm)
433            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
434            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
435            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
436            outvec.append(callback(i))
437            out += '= %3.3f\n' % (outvec[i])
438            out +=  "--------------------------------------------------\n"
439        if rcParams['verbose']:
440            print "--------------------------------------------------"
441            print " %s" % (label)
442            print "--------------------------------------------------"
443            print out
444        # disabled because the vector seems more useful
445        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
446        return outvec
447
448    def _get_column(self, callback, row=-1):
449        """
450        """
451        if row == -1:
452            return [callback(i) for i in range(self.nrow())]
453        else:
454            if  0 <= row < self.nrow():
455                return callback(row)
456
457
458    def get_time(self, row=-1, asdatetime=False):
459        """
460        Get a list of time stamps for the observations.
461        Return a datetime object for each integration time stamp in the scantable.
462        Parameters:
463            row:          row no of integration. Default -1 return all rows
464            asdatetime:   return values as datetime objects rather than strings
465        Example:
466            none
467        """
468        from time import strptime
469        from datetime import datetime
470        times = self._get_column(self._gettime, row)
471        if not asdatetime:
472            return times
473        format = "%Y/%m/%d/%H:%M:%S"
474        if isinstance(times, list):
475            return [datetime(*strptime(i, format)[:6]) for i in times]
476        else:
477            return datetime(*strptime(times, format)[:6])
478
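    # Illustrative sketch for get_time: with asdatetime=True the timestamps are
    # returned as datetime objects, so ordinary datetime arithmetic applies.
    #
    #     t0 = scan.get_time(row=0, asdatetime=True)
    #     alltimes = scan.get_time(asdatetime=True)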
479
480    def get_inttime(self, row=-1):
481        """
482        Get a list of integration times for the observations.
483        Return a time in seconds for each integration in the scantable.
484        Parameters:
485            row:    row no of integration. Default -1 return all rows.
486        Example:
487            none
488        """
489        return self._get_column(self._getinttime, row)       
490       
491
492    def get_sourcename(self, row=-1):
493        """
494        Get a list of source names for the observations.
495        Return a string for each integration in the scantable.
496        Parameters:
497            row:    row no of integration. Default -1 return all rows.
498        Example:
499            none
500        """
501        return self._get_column(self._getsourcename, row)
502
503    def get_elevation(self, row=-1):
504        """
505        Get a list of elevations for the observations.
506        Return a float for each integration in the scantable.
507        Parameters:
508            row:    row no of integration. Default -1 return all rows.
509        Example:
510            none
511        """
512        return self._get_column(self._getelevation, row)
513
514    def get_azimuth(self, row=-1):
515        """
516        Get a list of azimuths for the observations.
517        Return a float for each integration in the scantable.
518        Parameters:
519            row:    row no of integration. Default -1 return all rows.
520        Example:
521            none
522        """
523        return self._get_column(self._getazimuth, row)
524
525    def get_parangle(self, row=-1):
526        """
527        Get a list of parallactic angles for the observations.
528        Return a float for each integration in the scantable.
529        Parameters:
530            row:    row no of integration. Default -1 return all rows.
531        Example:
532            none
533        """
534        return self._get_column(self._getparangle, row)
535
536    def get_direction(self, row=-1):
537        """
538        Get a list of Positions on the sky (direction) for the observations.
539        Return the direction for each integration in the scantable.
540        Parameters:
541            row:    row no of integration. Default -1 return all rows
542        Example:
543            none
544        """
545        return self._get_column(self._getdirection, row)
546
547    def get_directionval(self, row=-1):
548        """
549        Get a list of Positions on the sky (direction) for the observations.
550        Return the direction values for each integration in the scantable.
551        Parameters:
552            row:    row no of integration. Default -1 return all rows
553        Example:
554            none
555        """
556        return self._get_column(self._getdirectionvec, row)
557
558
559    def set_unit(self, unit='channel'):
560        """
561        Set the unit for all following operations on this scantable
562        Parameters:
563            unit:    optional unit, default is 'channel'
564                     one of '*Hz', 'km/s', 'channel', ''
565        """
566        varlist = vars()
567        if unit in ['', 'pixel', 'channel']:
568            unit = ''
569        inf = list(self._getcoordinfo())
570        inf[0] = unit
571        self._setcoordinfo(inf)
572        self._add_history("set_unit", varlist)
573
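    # Illustrative sketch for set_unit: switch the abcissa to velocity so that
    # subsequent masks are given in km/s (the window values are arbitrary).
    #
    #     scan.set_unit('km/s')
    #     msk = scan.create_mask([-50, 50])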
574    def set_instrument(self, instr):
575        """
576        Set the instrument for subsequent processing.
577        Parameters:
578            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
579                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
580        """
581        self._setInstrument(instr)
582        self._add_history("set_instrument", vars())
583        print_log()
584
585    def set_feedtype(self, feedtype):
586        """
587        Overwrite the feed type, which might not be set correctly.
588        Parameters:
589            feedtype:     'linear' or 'circular'
590        """
591        self._setfeedtype(feedtype)
592        self._add_history("set_feedtype", vars())
593        print_log()
594
595    def set_doppler(self, doppler='RADIO'):
596        """
597        Set the doppler for all following operations on this scantable.
598        Parameters:
599            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
600        """
601        varlist = vars()
602        inf = list(self._getcoordinfo())
603        inf[2] = doppler
604        self._setcoordinfo(inf)
605        self._add_history("set_doppler", vars())
606        print_log()
607
608    def set_freqframe(self, frame=None):
609        """
610        Set the frame type of the Spectral Axis.
611        Parameters:
612            frame:   an optional frame type, default 'LSRK'. Valid frames are:
613                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
614                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
615        Examples:
616            scan.set_freqframe('BARY')
617        """
618        if frame is None: frame = rcParams['scantable.freqframe']
619        varlist = vars()
620        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
621                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
622
623        if frame in valid:
624            inf = list(self._getcoordinfo())
625            inf[1] = frame
626            self._setcoordinfo(inf)
627            self._add_history("set_freqframe", varlist)
628        else:
629            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
630            if rcParams['verbose']:
631                print msg
632            else:
633                raise TypeError(msg)
634        print_log()
635
636    def set_dirframe(self, frame=""):
637        """
638        Set the frame type of the Direction on the sky.
639        Parameters:
640            frame:   an optional frame type, default ''. Valid frames are:
641                     'J2000', 'B1950', 'GALACTIC'
642        Examples:
643            scan.set_dirframe('GALACTIC')
644        """
645        varlist = vars()
646        try:
647            Scantable.set_dirframe(self, frame)
648        except RuntimeError, msg:
649            if rcParams['verbose']:
650                print msg
651            else:
652                raise
653        self._add_history("set_dirframe", varlist)
654
655    def get_unit(self):
656        """
657        Get the default unit set in this scantable
658        Returns:
659            A unit string
660        """
661        inf = self._getcoordinfo()
662        unit = inf[0]
663        if unit == '': unit = 'channel'
664        return unit
665
666    def get_abcissa(self, rowno=0):
667        """
668        Get the abcissa in the current coordinate setup for the currently
669        selected Beam/IF/Pol
670        Parameters:
671            rowno:    an optional row number in the scantable. Default is the
672                      first row, i.e. rowno=0
673        Returns:
674            The abcissa values and the format string (as a dictionary)
675        """
676        abc = self._getabcissa(rowno)
677        lbl = self._getabcissalabel(rowno)
678        print_log()
679        return abc, lbl
680
681    def flag(self, mask=None):
682        """
683        Flag the selected data using an optional channel mask.
684        Parameters:
685            mask:   an optional channel mask, created with create_mask. Default
686                    (no mask) is all channels.
687        """
688        varlist = vars()
689        if mask is None:
690            mask = []
691        try:
692            self._flag(mask)
693        except RuntimeError, msg:
694            if rcParams['verbose']:
695                print msg
696                return
697            else: raise
698        self._add_history("flag", varlist)
699
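    # Illustrative sketch for flag (the channel window is arbitrary): build a
    # channel mask with create_mask and flag those channels.
    #
    #     scan.set_unit('channel')
    #     badchans = scan.create_mask([0, 10])
    #     scan.flag(mask=badchans)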
700    def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
701        """
702        Flag the data in 'lag' space by providing a frequency to remove.
703        Flagged data in the scantable gets set to 0.0 before the fft.
704        No taper is applied.
705        Parameters:
706            frequency:    the frequency (really a period within the bandwidth)
707                          to remove
708            width:        the width of the frequency to remove, to remove a
709                          range of frequencies around the centre.
710            unit:         the frequency unit (default "GHz")
711        Notes:
712            It is recommended to flag edges of the band or strong
713            signals beforehand.
714        """
715        if insitu is None: insitu = rcParams['insitu']
716        self._math._setinsitu(insitu)
717        varlist = vars()
718        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
719        if not base.has_key(unit):
720            raise ValueError("%s is not a valid unit." % unit)
721        try:
722            s = scantable(self._math._lag_flag(self, frequency*base[unit],
723                                               width*base[unit]))
724        except RuntimeError, msg:
725            if rcParams['verbose']:
726                print msg
727                return
728            else: raise
729        s._add_history("lag_flag", varlist)
730        print_log()
731        if insitu:
732            self._assign(s)
733        else:
734            return s
735
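    # Illustrative sketch for lag_flag (the numbers are arbitrary): remove a
    # 1.2 MHz ripple, with a small width around it, returning a new scantable.
    #
    #     cleaned = scan.lag_flag(frequency=1.2, width=0.05, unit="MHz",
    #                             insitu=False)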
736
737    def create_mask(self, *args, **kwargs):
738        """
739        Compute and return a mask based on [min, max] windows.
740        The specified windows are to be INCLUDED, when the mask is
741        applied.
742        Parameters:
743            [min, max], [min2, max2], ...
744                Pairs of start/end points (inclusive) specifying the regions
745                to be masked
746            invert:     optional argument. If specified as True,
747                        return an inverted mask, i.e. the regions
748                        specified are EXCLUDED
749            row:        create the mask using the specified row for
750                        unit conversions, default is row=0
751                        only necessary if frequency varies over rows.
752        Example:
753            scan.set_unit('channel')
754            a)
755            msk = scan.create_mask([400, 500], [800, 900])
756            # masks everything outside 400 and 500
757            # and 800 and 900 in the unit 'channel'
758
759            b)
760            msk = scan.create_mask([400, 500], [800, 900], invert=True)
761            # masks the regions between 400 and 500
762            # and 800 and 900 in the unit 'channel'
763            c)
764            # mask only channel 400
765            msk = scan.create_mask([400, 400])
766        """
767        row = 0
768        if kwargs.has_key("row"):
769            row = kwargs.get("row")
770        data = self._getabcissa(row)
771        u = self._getcoordinfo()[0]
772        if rcParams['verbose']:
773            if u == "": u = "channel"
774            msg = "The current mask window unit is %s" % u
775            i = self._check_ifs()
776            if not i:
777                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
778            asaplog.push(msg)
779        n = self.nchan()
780        msk = _n_bools(n, False)
781        # test if args is a 'list' or a 'normal *args - UGLY!!!
782
783        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
784             and args or args[0]
785        for window in ws:
786            if (len(window) != 2 or window[0] > window[1] ):
787                raise TypeError("A window needs to be defined as [min, max]")
788            for i in range(n):
789                if data[i] >= window[0] and data[i] <= window[1]:
790                    msk[i] = True
791        if kwargs.has_key('invert'):
792            if kwargs.get('invert'):
793                msk = mask_not(msk)
794        print_log()
795        return msk
796
797    def get_restfreqs(self):
798        """
799        Get the restfrequency(s) stored in this scantable.
800        The return value(s) are always of unit 'Hz'
801        Parameters:
802            none
803        Returns:
804            a list of doubles
805        """
806        return list(self._getrestfreqs())
807
808
809    def set_restfreqs(self, freqs=None, unit='Hz'):
810        """
811        Set or replace the rest frequency or frequencies.
812        If the 'freqs' argument holds a scalar,
813        then that rest frequency will be applied to all the selected
814        data.  If the 'freqs' argument holds
815        a vector, then it MUST be of equal or smaller length than
816        the number of IFs (and the available restfrequencies will be
817        replaced by this vector).  In this case, *all* data have
818        the restfrequency set per IF according
819        to the corresponding value you give in the 'freqs' vector.
820        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
821        IF 1 gets restfreq 2e9.
822        You can also specify the frequencies via a linecatalog.
823
824        Parameters:
825            freqs:   list of rest frequency values or string identifiers
826            unit:    unit for rest frequency (default 'Hz')
827
828        Example:
829            # set the given restfrequency for the whole table
830            scan.set_restfreqs(freqs=1.4e9)
831            # If the number of IFs in the data is >= 2, IF0 gets the first
832            # value IF1 the second...
833            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
834            #set the given restfrequency for the whole table (by name)
835            scan.set_restfreqs(freqs="OH1667")
836
837        Note:
838            To do more sophisticated rest frequency setting, e.g. on a
839            source and IF basis, use scantable.set_selection() before using
840            this function.
841            # provided your scantable is called scan
842            selection = selector()
843            selection.set_name("ORION*")
844            selection.set_ifs([1])
845            scan.set_selection(selection)
846            scan.set_restfreqs(freqs=86.6e9)
847
848        """
849        varlist = vars()
850        from asap import linecatalog
851        # simple  value
852        if isinstance(freqs, int) or isinstance(freqs, float):
853            self._setrestfreqs(freqs, "",unit)
854        # list of values
855        elif isinstance(freqs, list) or isinstance(freqs, tuple):
856            # list values are scalars
857            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
858                sel = selector()
859                savesel = self._getselection()
860                iflist = self.getifnos()
861                for i in xrange(len(freqs)):
862                    sel.set_ifs(iflist[i])
863                    self._setselection(sel)
864                    self._setrestfreqs(freqs[i], "",unit)
865                self._setselection(savesel)
866            # list values are dicts, {'value': ..., 'name': ...}
867            elif isinstance(freqs[-1], dict):
868                sel = selector()
869                savesel = self._getselection()
870                iflist = self.getifnos()
871                for i in xrange(len(freqs)):
872                    sel.set_ifs(iflist[i])
873                    self._setselection(sel)
874                    self._setrestfreqs(freqs[i]["value"],
875                                       freqs[i]["name"], "MHz")
876                self._setselection(savesel)
877        # freqs are to be taken from a linecatalog
878        elif isinstance(freqs, linecatalog):
879            sel = selector()
880            savesel = self._getselection()
881            iflist = self.getifnos()
882            for i in xrange(freqs.nrow()):
883                sel.set_ifs(iflist[i])
884                self._setselection(sel)
885                self._setrestfreqs(freqs.get_frequency(i),
886                                   freqs.get_name(i), "MHz")
887                # ensure that we are not iterating past nIF
888                if i == self.nif()-1: break
889            self._setselection(savesel)
890        else:
891            return
892        self._add_history("set_restfreqs", varlist)
893
894    def shift_refpix(self, delta):
895        """
896        Shift the reference pixel of the Spectral Coordinate by an
897        integer amount.
898        Parameters:
899            delta:   the amount to shift by
900        Note:
901            Be careful using this with broadband data.
902        """
903        Scantable.shift(self, delta)
904
905    def history(self, filename=None):
906        """
907        Print the history. Optionally to a file.
908        Parameters:
909            filename:    The name of the file to save the history to.
910        """
911        hist = list(self._gethistory())
912        out = "-"*80
913        for h in hist:
914            if h.startswith("---"):
915                out += "\n"+h
916            else:
917                items = h.split("##")
918                date = items[0]
919                func = items[1]
920                items = items[2:]
921                out += "\n"+date+"\n"
922                out += "Function: %s\n  Parameters:" % (func)
923                for i in items:
924                    s = i.split("=")
925                    out += "\n   %s = %s" % (s[0], s[1])
926                out += "\n"+"-"*80
927        if filename is not None:
928            if filename == "":
929                filename = 'scantable_history.txt'
930            import os
931            filename = os.path.expandvars(os.path.expanduser(filename))
932            if not os.path.isdir(filename):
933                data = open(filename, 'w')
934                data.write(out)
935                data.close()
936            else:
937                msg = "Illegal file name '%s'." % (filename)
938                if rcParams['verbose']:
939                    print msg
940                else:
941                    raise IOError(msg)
942        if rcParams['verbose']:
943            try:
944                from IPython.genutils import page as pager
945            except ImportError:
946                from pydoc import pager
947            pager(out)
948        else:
949            return out
950        return
951    #
952    # Maths business
953    #
954
955    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
956        """
957        Return the (time) weighted average of a scan.
958        Note:
959            in channels only - align if necessary
960        Parameters:
961            mask:     an optional mask (only used for 'var' and 'tsys'
962                      weighting)
963            scanav:   True averages each scan separately
964                      False (default) averages all scans together,
965            weight:   Weighting scheme.
966                      'none'     (mean no weight)
967                      'var'      (1/var(spec) weighted)
968                      'tsys'     (1/Tsys**2 weighted)
969                      'tint'     (integration time weighted)
970                      'tintsys'  (Tint/Tsys**2)
971                      'median'   ( median averaging)
972                      The default is 'tint'
973            align:    align the spectra in velocity before averaging. It takes
974                      the time of the first spectrum as reference time.
975        Example:
976            # time average the scantable without using a mask
977            newscan = scan.average_time()
978        """
979        varlist = vars()
980        if weight is None: weight = 'TINT'
981        if mask is None: mask = ()
982        if scanav: scanav = "SCAN"
983        else: scanav = "NONE"
984        scan = (self, )
985        try:
986            if align:
987                scan = (self.freq_align(insitu=False), )
988            s = None
989            if weight.upper() == 'MEDIAN':
990                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
991                                                         scanav))
992            else:
993                s = scantable(self._math._average(scan, mask, weight.upper(),
994                              scanav))
995        except RuntimeError, msg:
996            if rcParams['verbose']:
997                print msg
998                return
999            else: raise
1000        s._add_history("average_time", varlist)
1001        print_log()
1002        return s
1003
1004    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1005        """
1006        Return a scan where all spectra are converted to either
1007        Jansky or Kelvin depending upon the flux units of the scan table.
1008        By default the function tries to look the values up internally.
1009        If it can't find them (or if you want to over-ride), you must
1010        specify EITHER jyperk OR eta (and D which it will try to look up
1011        also if you don't set it). jyperk takes precedence if you set both.
1012        Parameters:
1013            jyperk:      the Jy / K conversion factor
1014            eta:         the aperture efficiency
1015            d:           the geometric diameter (metres)
1016            insitu:      if False a new scantable is returned.
1017                         Otherwise, the scaling is done in-situ
1018                         The default is taken from .asaprc (False)
1019        """
1020        if insitu is None: insitu = rcParams['insitu']
1021        self._math._setinsitu(insitu)
1022        varlist = vars()
1023        if jyperk is None: jyperk = -1.0
1024        if d is None: d = -1.0
1025        if eta is None: eta = -1.0
1026        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1027        s._add_history("convert_flux", varlist)
1028        print_log()
1029        if insitu: self._assign(s)
1030        else: return s
1031
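
    # Illustrative sketch for convert_flux: supply an explicit Jy/K factor
    # (the value 12.5 is arbitrary, not a recommended calibration).
    #
    #     jyscan = scan.convert_flux(jyperk=12.5, insitu=False)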
1032    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1033        """
1034        Return a scan after applying a gain-elevation correction.
1035        The correction can be made via either a polynomial or a
1036        table-based interpolation (and extrapolation if necessary).
1037        You specify polynomial coefficients, an ascii table or neither.
1038        If you specify neither, then a polynomial correction will be made
1039        with built in coefficients known for certain telescopes (an error
1040        will occur if the instrument is not known).
1041        The data and Tsys are *divided* by the scaling factors.
1042        Parameters:
1043            poly:        Polynomial coefficients (default None) to compute a
1044                         gain-elevation correction as a function of
1045                         elevation (in degrees).
1046            filename:    The name of an ascii file holding correction factors.
1047                         The first row of the ascii file must give the column
1048                         names and these MUST include columns
1049                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1050                         by this) somewhere.
1051                         The second row must give the data type of the
1052                         column. Use 'R' for Real and 'I' for Integer.
1053                         An example file would be
1054                         (actual factors are arbitrary) :
1055
1056                         TIME ELEVATION FACTOR
1057                         R R R
1058                         0.1 0 0.8
1059                         0.2 20 0.85
1060                         0.3 40 0.9
1061                         0.4 60 0.85
1062                         0.5 80 0.8
1063                         0.6 90 0.75
1064            method:      Interpolation method when correcting from a table.
1065                         Values are  "nearest", "linear" (default), "cubic"
1066                         and "spline"
1067            insitu:      if False a new scantable is returned.
1068                         Otherwise, the scaling is done in-situ
1069                         The default is taken from .asaprc (False)
1070        """
1071
1072        if insitu is None: insitu = rcParams['insitu']
1073        self._math._setinsitu(insitu)
1074        varlist = vars()
1075        if poly is None:
1076            poly = ()
1077        from os.path import expandvars
1078        filename = expandvars(filename)
1079        s = scantable(self._math._gainel(self, poly, filename, method))
1080        s._add_history("gain_el", varlist)
1081        print_log()
1082        if insitu: self._assign(s)
1083        else: return s
1084
1085    def freq_align(self, reftime=None, method='cubic', insitu=None):
1086        """
1087        Return a scan where all rows have been aligned in frequency/velocity.
1088        The alignment frequency frame (e.g. LSRK) is that set by function
1089        set_freqframe.
1090        Parameters:
1091            reftime:     reference time to align at. By default, the time of
1092                         the first row of data is used.
1093            method:      Interpolation method for regridding the spectra.
1094                         Choose from "nearest", "linear", "cubic" (default)
1095                         and "spline"
1096            insitu:      if False a new scantable is returned.
1097                         Otherwise, the scaling is done in-situ
1098                         The default is taken from .asaprc (False)
1099        """
1100        if insitu is None: insitu = rcParams["insitu"]
1101        self._math._setinsitu(insitu)
1102        varlist = vars()
1103        if reftime is None: reftime = ""
1104        s = scantable(self._math._freq_align(self, reftime, method))
1105        s._add_history("freq_align", varlist)
1106        print_log()
1107        if insitu: self._assign(s)
1108        else: return s
1109
1110    def opacity(self, tau, insitu=None):
1111        """
1112        Apply an opacity correction. The data
1113        and Tsys are multiplied by the correction factor.
1114        Parameters:
1115            tau:         Opacity from which the correction factor is
1116                         exp(tau*ZD)
1117                         where ZD is the zenith-distance
1118            insitu:      if False a new scantable is returned.
1119                         Otherwise, the scaling is done in-situ
1120                         The default is taken from .asaprc (False)
1121        """
1122        if insitu is None: insitu = rcParams['insitu']
1123        self._math._setinsitu(insitu)
1124        varlist = vars()
1125        s = scantable(self._math._opacity(self, tau))
1126        s._add_history("opacity", varlist)
1127        print_log()
1128        if insitu: self._assign(s)
1129        else: return s
1130
1131    def bin(self, width=5, insitu=None):
1132        """
1133        Return a scan where all spectra have been binned up.
1134        Parameters:
1135            width:       The bin width (default=5) in pixels
1136            insitu:      if False a new scantable is returned.
1137                         Otherwise, the scaling is done in-situ
1138                         The default is taken from .asaprc (False)
1139        """
1140        if insitu is None: insitu = rcParams['insitu']
1141        self._math._setinsitu(insitu)
1142        varlist = vars()
1143        s = scantable(self._math._bin(self, width))
1144        s._add_history("bin", varlist)
1145        print_log()
1146        if insitu: self._assign(s)
1147        else: return s
1148
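    # Illustrative sketch for bin (the width is arbitrary): bin the spectra by
    # 4 channels and return a new scantable, leaving the original untouched.
    #
    #     binned = scan.bin(width=4, insitu=False)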
1149
1150    def resample(self, width=5, method='cubic', insitu=None):
1151        """
1152        Return a scan where all spectra have been resampled.
1153       
1154        Parameters:
1155            width:       The bin width (default=5) in pixels
1156            method:      Interpolation method for the resampling.
1157                         Values are  "nearest", "linear", "cubic" (default)
1158                         and "spline"
1159            insitu:      if False a new scantable is returned.
1160                         Otherwise, the scaling is done in-situ
1161                         The default is taken from .asaprc (False)
1162        """
1163        if insitu is None: insitu = rcParams['insitu']
1164        self._math._setinsitu(insitu)
1165        varlist = vars()
1166        s = scantable(self._math._resample(self, method, width))
1167        s._add_history("resample", varlist)
1168        print_log()
1169        if insitu: self._assign(s)
1170        else: return s
1171
1172
1173    def average_pol(self, mask=None, weight='none'):
1174        """
1175        Average the Polarisations together.
1176        Parameters:
1177            mask:        An optional mask defining the region, where the
1178                         averaging will be applied. The output will have all
1179                         specified points masked.
1180            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1181                         weighted), or 'tsys' (1/Tsys**2 weighted)
1182        """
1183        varlist = vars()
1184        if mask is None:
1185            mask = ()
1186        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1187        s._add_history("average_pol", varlist)
1188        print_log()
1189        return s
1190
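    # Illustrative sketch for average_pol: collapse the polarisations using
    # 1/Tsys**2 weighting ('scan' is assumed to be an existing scantable).
    #
    #     ipol = scan.average_pol(weight='tsys')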
1191    def average_beam(self, mask=None, weight='none'):
1192        """
1193        Average the Beams together.
1194        Parameters:
1195            mask:        An optional mask defining the region, where the
1196                         averaging will be applied. The output will have all
1197                         specified points masked.
1198            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1199                         weighted), or 'tsys' (1/Tsys**2 weighted)
1200        """
1201        varlist = vars()
1202        if mask is None:
1203            mask = ()
1204        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1205        s._add_history("average_beam", varlist)
1206        print_log()
1207        return s
1208
1209    def convert_pol(self, poltype=None):
1210        """
1211        Convert the data to a different polarisation type.
1212        Parameters:
1213            poltype:    The new polarisation type. Valid types are:
1214                        "linear", "stokes" and "circular"
1215        """
1216        varlist = vars()
1217        try:
1218            s = scantable(self._math._convertpol(self, poltype))
1219        except RuntimeError, msg:
1220            if rcParams['verbose']:
1221                print msg
1222                return
1223            else:
1224                raise
1225        s._add_history("convert_pol", varlist)
1226        print_log()
1227        return s
1228
1229    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1230        """
1231        Smooth the spectrum by the specified kernel (conserving flux).
1232        Parameters:
1233            kernel:     The type of smoothing kernel. Select from
1234                        'hanning' (default), 'gaussian', 'boxcar' and
1235                        'rmedian'
1236            width:      The width of the kernel in pixels. For hanning this is
1237                        ignored, otherwise it defaults to 5 pixels.
1238                        For 'gaussian' it is the Full Width Half
1239                        Maximum. For 'boxcar' it is the full width.
1240                        For 'rmedian' it is the half width.
1241            insitu:     if False a new scantable is returned.
1242                        Otherwise, the scaling is done in-situ
1243                        The default is taken from .asaprc (False)
1244        Example:
1245             none
1246        """
1247        if insitu is None: insitu = rcParams['insitu']
1248        self._math._setinsitu(insitu)
1249        varlist = vars()
1250        s = scantable(self._math._smooth(self, kernel.lower(), width))
1251        s._add_history("smooth", varlist)
1252        print_log()
1253        if insitu: self._assign(s)
1254        else: return s
1255
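    # Illustrative sketch for smooth (kernel and width are arbitrary): boxcar
    # smoothing over 5 channels, returned as a new scantable.
    #
    #     smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)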
1256
1257    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1258        """
1259        Return a scan which has been baselined (all rows) by a polynomial.
1260        Parameters:
1261            mask:       an optional mask
1262            order:      the order of the polynomial (default is 0)
1263            plot:       plot the fit and the residual. In this case each
1264                        individual fit has to be approved by typing 'y'
1265                        or 'n'
1266            uselin:     use linear polynomial fit
1267            insitu:     if False a new scantable is returned.
1268                        Otherwise, the scaling is done in-situ
1269                        The default is taken from .asaprc (False)
1270        Example:
1271            # return a scan baselined by a third order polynomial,
1272            # not using a mask
1273            bscan = scan.poly_baseline(order=3)
1274        """
1275        if insitu is None: insitu = rcParams['insitu']
1276        varlist = vars()
1277        if mask is None:
1278            mask = [True for i in xrange(self.nchan(-1))]
1279        from asap.asapfitter import fitter
1280        try:
1281            f = fitter()
1282            f.set_scan(self, mask)
1283            #f.set_function(poly=order)
1284            if uselin:
1285                f.set_function(lpoly=order)
1286            else:
1287                f.set_function(poly=order)
1288            s = f.auto_fit(insitu, plot=plot)
1289            s._add_history("poly_baseline", varlist)
1290            print_log()
1291            if insitu: self._assign(s)
1292            else: return s
1293        except RuntimeError:
1294            msg = "The fit failed, possibly because it didn't converge."
1295            if rcParams['verbose']:
1296                print msg
1297                return
1298            else:
1299                raise RuntimeError(msg)
1300
1301
1302    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1303                           threshold=3, chan_avg_limit=1, plot=False,
1304                           insitu=None):
1305        """
1306        Return a scan which has been baselined (all rows) by a polynomial.
1307        Spectral lines are detected first using linefinder and masked out
1308        to avoid them affecting the baseline solution.
1309
1310        Parameters:
1311            mask:       an optional mask retrieved from scantable
1312            edge:       an optional number of channels to drop at
1313                        the edge of the spectrum. If only one value is
1314                        specified, the same number will be dropped from
1315                        both sides of the spectrum. Default is to keep
1316                        all channels. Nested tuples represent individual
1317                        edge selection for different IFs (the number of spectral
1318                        channels can be different)
1319            order:      the order of the polynomial (default is 0)
1320            threshold:  the threshold used by line finder. It is better to
1321                        keep it large as only strong lines affect the
1322                        baseline solution.
1323            chan_avg_limit:
1324                        a maximum number of consecutive spectral channels to
1325                        average during the search of weak and broad lines.
1326                        The default is no averaging (and no search for weak
1327                        lines). If such lines can affect the fitted baseline
1328                        (e.g. a high order polynomial is fitted), increase this
1329                        parameter (usually values up to 8 are reasonable). Most
1330                        users of this method should find the default value
1331                        sufficient.
1332            plot:       plot the fit and the residual. In this case each
1333                        individual fit has to be approved by typing 'y'
1334                        or 'n'
1335            insitu:     if False a new scantable is returned.
1336                        Otherwise, the scaling is done in-situ
1337                        The default is taken from .asaprc (False)
1338
1339        Example:
1340            scan2=scan.auto_poly_baseline(order=7)
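            # a sketch of per-IF edge selection using nested tuples; the edge
            # values and the assumption of two IFs are purely illustrative
            scan3 = scan.auto_poly_baseline(order=2, edge=((5, 5), (10, 10)))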
1341        """
1342        if insitu is None: insitu = rcParams['insitu']
1343        varlist = vars()
1344        from asap.asapfitter import fitter
1345        from asap.asaplinefind import linefinder
1346        from asap import _is_sequence_or_number as _is_valid
1347
1348        # check whether edge is set up for each IF individually
1349        individualedge = False
1350        if len(edge) > 1:
1351            if isinstance(edge[0], (list, tuple)):
1352                individualedge = True
1353
1354        if not _is_valid(edge, int) and not individualedge:
1355            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1356                              "pair of integers specified as a tuple. Nested tuples are " \
1357                              "allowed to make individual selections for different IFs."
1358
1359        curedge = (0, 0)
1360        if individualedge:
1361            for edgepar in edge:
1362                if not _is_valid(edgepar, int):
1363                    raise ValueError, "Each element of the 'edge' tuple has " \
1364                                      "to be a pair of integers or an integer."
1365        else:
1366            curedge = edge
1367
1368        # setup fitter
1369        f = fitter()
1370        f.set_function(poly=order)
1371
1372        # setup line finder
1373        fl = linefinder()
1374        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1375
1376        if not insitu:
1377            workscan = self.copy()
1378        else:
1379            workscan = self
1380
1381        fl.set_scan(workscan)
1382
1383        rows = range(workscan.nrow())
1384        asaplog.push("Processing:")
1385        for r in rows:
1386            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1387                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1388                 workscan.getpol(r), workscan.getcycle(r))
1389            asaplog.push(msg, False)
1390
1391            # figure out edge parameter
1392            if individualedge:
1393                if len(edge) <= workscan.getif(r):
1394                    raise RuntimeError, "Number of edge elements appears to " \
1395                                        "be less than the number of IFs"
1396                curedge = edge[workscan.getif(r)]
1397
1398            # setup line finder
1399            fl.find_lines(r, mask, curedge)
1400            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1401                        mask_and(workscan._getmask(r), fl.get_mask()))
1402            f.fit()
1403            x = f.get_parameters()
1404            if plot:
1405                f.plot(residual=True)
1406                x = raw_input("Accept fit ( [y]/n ): ")
1407                if x.upper() == 'N':
1408                    continue
1409            workscan._setspectrum(f.fitter.getresidual(), r)
1410        if plot:
1411            f._p.unmap()
1412            f._p = None
1413        workscan._add_history("auto_poly_baseline", varlist)
1414        if insitu:
1415            self._assign(workscan)
1416        else:
1417            return workscan
1418
1419    def rotate_linpolphase(self, angle):
1420        """
1421        Rotate the phase of the complex polarization O=Q+iU correlation.
1422        This is always done in situ in the raw data.  So if you call this
1423        function more than once then each call rotates the phase further.
1424        Parameters:
1425            angle:   The angle (degrees) to rotate (add) by.
1426        Examples:
1427            scan.rotate_linpolphase(2.3)
1428        """
1429        varlist = vars()
1430        self._math._rotate_linpolphase(self, angle)
1431        self._add_history("rotate_linpolphase", varlist)
1432        print_log()
1433        return
1434
1435
1436    def rotate_xyphase(self, angle):
1437        """
1438        Rotate the phase of the XY correlation.  This is always done in situ
1439        in the data.  So if you call this function more than once
1440        then each call rotates the phase further.
1441        Parameters:
1442            angle:   The angle (degrees) to rotate (add) by.
1443        Examples:
1444            scan.rotate_xyphase(2.3)
1445        """
1446        varlist = vars()
1447        self._math._rotate_xyphase(self, angle)
1448        self._add_history("rotate_xyphase", varlist)
1449        print_log()
1450        return
1451
1452    def swap_linears(self):
1453        """
1454        Swap the linear polarisations XX and YY, or more precisely the first
1455        two polarisations, as this also works for circulars.
1456        """
1457        varlist = vars()
1458        self._math._swap_linears(self)
1459        self._add_history("swap_linears", varlist)
1460        print_log()
1461        return
1462
1463    def invert_phase(self):
1464        """
1465        Invert the phase of the complex polarisation
1466        """
1467        varlist = vars()
1468        self._math._invert_phase(self)
1469        self._add_history("invert_phase", varlist)
1470        print_log()
1471        return
1472
1473    def add(self, offset, insitu=None):
1474        """
1475        Return a scan where all spectra have the offset added
1476        Parameters:
1477            offset:      the offset
1478            insitu:      if False a new scantable is returned.
1479                         Otherwise, the scaling is done in-situ
1480                         The default is taken from .asaprc (False)
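        Example (a minimal sketch; the offset value is arbitrary):
            # add a constant offset to every spectrum, returning a new scantable
            shifted = scan.add(0.5, insitu=False)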
1481        """
1482        if insitu is None: insitu = rcParams['insitu']
1483        self._math._setinsitu(insitu)
1484        varlist = vars()
1485        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1486        s._add_history("add", varlist)
1487        print_log()
1488        if insitu:
1489            self._assign(s)
1490        else:
1491            return s
1492
1493    def scale(self, factor, tsys=True, insitu=None):
1494        """
1495        Return a scan where all spectra are scaled by the given 'factor'
1496        Parameters:
1497            factor:      the scaling factor
1498            insitu:      if False a new scantable is returned.
1499                         Otherwise, the scaling is done in-situ
1500                         The default is taken from .asaprc (False)
1501            tsys:        if True (default) then apply the operation to Tsys
1502                         as well as the data
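        Example (a minimal sketch; the factor is arbitrary):
            # double every spectrum and the associated Tsys values in place
            scan.scale(2.0, tsys=True, insitu=True)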
1503        """
1504        if insitu is None: insitu = rcParams['insitu']
1505        self._math._setinsitu(insitu)
1506        varlist = vars()
1507        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1508        s._add_history("scale", varlist)
1509        print_log()
1510        if insitu:
1511            self._assign(s)
1512        else:
1513            return s
1514
1515    def set_sourcetype(self, match, matchtype="pattern",
1516                       sourcetype="reference"):
1517        """
1518        Set the type of the source to be a source or reference scan
1519        using the provided pattern.
1520        Parameters:
1521            match:          a Unix style pattern, regular expression or selector
1522            matchtype:      'pattern' (default) UNIX style pattern or
1523                            'regex' regular expression
1524            sourcetype:     the type of the source to use (source/reference)
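        Example (a minimal sketch, using the trailing '_R' off-scan naming
        convention mentioned in auto_quotient):
            scan.set_sourcetype("*_R", matchtype="pattern",
                                sourcetype="reference")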
1525        """
1526        varlist = vars()
1527        basesel = self.get_selection()
1528        stype = -1
1529        if sourcetype.lower().startswith("r"):
1530            stype = 1
1531        elif sourcetype.lower().startswith("s"):
1532            stype = 0
1533        else:
1534            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1535        if matchtype.lower().startswith("p"):
1536            matchtype = "pattern"
1537        elif matchtype.lower().startswith("r"):
1538            matchtype = "regex"
1539        else:
1540            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1541        sel = selector()
1542        if isinstance(match, selector):
1543            sel = match
1544        else:
1545            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1546        self.set_selection(basesel+sel)
1547        self._setsourcetype(stype)
1548        self.set_selection(basesel)
1549        self._add_history("set_sourcetype", varlist)
1550
1551    def auto_quotient(self, preserve=True, mode='paired'):
1552        """
1553        This function builds quotients automatically.
1554        It assumes the observation to have the same number of
1555        "ons" and "offs".
1556        Parameters:
1557            preserve:       you can preserve (default) the continuum or
1558                            remove it.  The equations used are
1559                            preserve: Output = Toff * (on/off) - Toff
1560                            remove:   Output = Toff * (on/off) - Ton
1561            mode:           the on/off detection mode
1562                            'paired' (default)
1563                            identifies 'off' scans by the
1564                            trailing '_R' (Mopra/Parkes) or
1565                            '_e'/'_w' (Tid) and matches
1566                            on/off pairs from the observing pattern
1567                            'time'
1568                            finds the closest off in time
1569
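        Example (a minimal sketch):
            # form quotient spectra using the default paired on/off detection
            q = scan.auto_quotient(preserve=True, mode='paired')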
1570        """
1571        modes = ["time", "paired"]
1572        if not mode in modes:
1573            msg = "please provide a valid mode. Valid modes are %s" % (modes)
1574            raise ValueError(msg)
1575        varlist = vars()
1576        s = None
1577        if mode.lower() == "paired":
1578            basesel = self.get_selection()
1579            sel = selector()+basesel
1580            sel.set_query("SRCTYPE==1")
1581            self.set_selection(sel)
1582            offs = self.copy()
1583            sel.set_query("SRCTYPE==0")
1584            self.set_selection(sel)
1585            ons = self.copy()
1586            s = scantable(self._math._quotient(ons, offs, preserve))
1587            self.set_selection(basesel)
1588        elif mode.lower() == "time":
1589            s = scantable(self._math._auto_quotient(self, mode, preserve))
1590        s._add_history("auto_quotient", varlist)
1591        print_log()
1592        return s
1593
1594    def mx_quotient(self, mask = None, weight='median', preserve=True):
1595        """
1596        Form a quotient using "off" beams when observing in "MX" mode.
1597        Parameters:
1598            mask:           an optional mask to be used when weight == 'stddev'
1599            weight:         How to average the off beams.  Default is 'median'.
1600            preserve:       you can preserve (default) the continuum or
1601                            remove it.  The equations used are
1602                            preserve: Output = Toff * (on/off) - Toff
1603                            remove:   Output = Toff * (on/off) - Ton
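        Example (a minimal sketch):
            # median-average the off beams and form a continuum-preserving quotient
            q = scan.mx_quotient(weight='median', preserve=True)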
1604        """
1605        if mask is None: mask = ()
1606        varlist = vars()
1607        on = scantable(self._math._mx_extract(self, 'on'))
1608        preoff = scantable(self._math._mx_extract(self, 'off'))
1609        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1610        from asapmath  import quotient
1611        q = quotient(on, off, preserve)
1612        q._add_history("mx_quotient", varlist)
1613        print_log()
1614        return q
1615
1616    def freq_switch(self, insitu=None):
1617        """
1618        Apply frequency switching to the data.
1619        Parameters:
1620            insitu:      if False a new scantable is returned.
1621                         Otherwise, the switching is done in-situ
1622                         The default is taken from .asaprc (False)
1623        Example:
1624            none
1625        """
1626        if insitu is None: insitu = rcParams['insitu']
1627        self._math._setinsitu(insitu)
1628        varlist = vars()
1629        s = scantable(self._math._freqswitch(self))
1630        s._add_history("freq_switch", varlist)
1631        print_log()
1632        if insitu: self._assign(s)
1633        else: return s
1634
1635    def recalc_azel(self):
1636        """
1637        Recalculate the azimuth and elevation for each position.
1638        Parameters:
1639            none
1640        Example:
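            # a minimal sketch: recompute Az/El from the stored directions and times
            scan.recalc_azel()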
1641        """
1642        varlist = vars()
1643        self._recalcazel()
1644        self._add_history("recalc_azel", varlist)
1645        print_log()
1646        return
1647
1648    def __add__(self, other):
1649        varlist = vars()
1650        s = None
1651        if isinstance(other, scantable):
1652            s = scantable(self._math._binaryop(self, other, "ADD"))
1653        elif isinstance(other, float):
1654            s = scantable(self._math._unaryop(self, other, "ADD", False))
1655        else:
1656            raise TypeError("Other input is not a scantable or float value")
1657        s._add_history("operator +", varlist)
1658        print_log()
1659        return s
1660
1661    def __sub__(self, other):
1662        """
1663        implicit on all axes and on Tsys
1664        """
1665        varlist = vars()
1666        s = None
1667        if isinstance(other, scantable):
1668            s = scantable(self._math._binaryop(self, other, "SUB"))
1669        elif isinstance(other, float):
1670            s = scantable(self._math._unaryop(self, other, "SUB", False))
1671        else:
1672            raise TypeError("Other input is not a scantable or float value")
1673        s._add_history("operator -", varlist)
1674        print_log()
1675        return s
1676
1677    def __mul__(self, other):
1678        """
1679        implicit on all axes and on Tsys
1680        """
1681        varlist = vars()
1682        s = None
1683        if isinstance(other, scantable):
1684            s = scantable(self._math._binaryop(self, other, "MUL"))
1685        elif isinstance(other, float):
1686            s = scantable(self._math._unaryop(self, other, "MUL", False))
1687        else:
1688            raise TypeError("Other input is not a scantable or float value")
1689        s._add_history("operator *", varlist)
1690        print_log()
1691        return s
1692
1693
1694    def __div__(self, other):
1695        """
1696        implicit on all axes and on Tsys
1697        """
1698        varlist = vars()
1699        s = None
1700        if isinstance(other, scantable):
1701            s = scantable(self._math._binaryop(self, other, "DIV"))
1702        elif isinstance(other, float):
1703            if other == 0.0:
1704                raise ZeroDivisionError("Dividing by zero is not recommended")
1705            s = scantable(self._math._unaryop(self, other, "DIV", False))
1706        else:
1707            raise TypeError("Other input is not a scantable or float value")
1708        s._add_history("operator /", varlist)
1709        print_log()
1710        return s
1711
1712    def get_fit(self, row=0):
1713        """
1714        Print or return the stored fits for a row in the scantable
1715        Parameters:
1716            row:    the row which the fit has been applied to.
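        Example (a minimal sketch):
            # print (in verbose mode) or return the fit stored for row 0
            scan.get_fit(0)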
1717        """
1718        if row >= self.nrow():
1719            return
1720        from asap.asapfit import asapfit
1721        fit = asapfit(self._getfit(row))
1722        if rcParams['verbose']:
1723            print fit
1724            return
1725        else:
1726            return fit.as_dict()
1727
1728    def flag_nans(self):
1729        """
1730        Utility function to flag NaN values in the scantable.
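        Example (a minimal sketch):
            scan.flag_nans()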
1731        """
1732        import numpy
1733        basesel = self.get_selection()
1734        for i in range(self.nrow()):
1735            sel = selector()+basesel
1736            sel.set_scans(self.getscan(i))
1737            sel.set_beams(self.getbeam(i))
1738            sel.set_ifs(self.getif(i))
1739            sel.set_polarisations(self.getpol(i))
1740            self.set_selection(sel)
1741            nans = numpy.isnan(self._getspectrum(0))
1742            if numpy.any(nans):
1743                bnans = [bool(v) for v in nans]
1744                self.flag(bnans)
1745        self.set_selection(basesel)
1746       
1747
1748    def _add_history(self, funcname, parameters):
1749        if not rcParams['scantable.history']:
1750            return
1751        # create date
1752        sep = "##"
1753        from datetime import datetime
1754        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1755        hist = dstr+sep
1756        hist += funcname+sep#cdate+sep
1757        if parameters.has_key('self'): del parameters['self']
1758        for k, v in parameters.iteritems():
1759            if type(v) is dict:
1760                for k2, v2 in v.iteritems():
1761                    hist += k2
1762                    hist += "="
1763                    if isinstance(v2, scantable):
1764                        hist += 'scantable'
1765                    elif k2 == 'mask':
1766                        if isinstance(v2, list) or isinstance(v2, tuple):
1767                            hist += str(self._zip_mask(v2))
1768                        else:
1769                            hist += str(v2)
1770                    else:
1771                        hist += str(v2)
1772            else:
1773                hist += k
1774                hist += "="
1775                if isinstance(v, scantable):
1776                    hist += 'scantable'
1777                elif k == 'mask':
1778                    if isinstance(v, list) or isinstance(v, tuple):
1779                        hist += str(self._zip_mask(v))
1780                    else:
1781                        hist += str(v)
1782                else:
1783                    hist += str(v)
1784            hist += sep
1785        hist = hist[:-2] # remove trailing '##'
1786        self._addhistory(hist)
1787
1788
1789    def _zip_mask(self, mask):
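        # Compress a boolean channel mask into a list of [start, stop) index
        # pairs covering the unmasked (True) runs, keeping history entries compact.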
1790        mask = list(mask)
1791        i = 0
1792        segments = []
1793        while mask[i:].count(1):
1794            i += mask[i:].index(1)
1795            if mask[i:].count(0):
1796                j = i + mask[i:].index(0)
1797            else:
1798                j = len(mask)
1799            segments.append([i, j])
1800            i = j
1801        return segments
1802
1803    def _get_ordinate_label(self):
1804        fu = "("+self.get_fluxunit()+")"
1805        import re
1806        lbl = "Intensity"
1807        if re.match(".K.", fu):
1808            lbl = "Brightness Temperature "+ fu
1809        elif re.match(".Jy.", fu):
1810            lbl = "Flux density "+ fu
1811        return lbl
1812
1813    def _check_ifs(self):
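        # Return True when every IF that contains data has the same number
        # of channels as the first one.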
1814        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1815        nchans = filter(lambda t: t > 0, nchans)
1816        return all(nc == nchans[0] for nc in nchans)
1817
1818    def _fill(self, names, unit, average):
1819        import os
1820        from asap._asap import stfiller
1821        first = True
1822        fullnames = []
1823        for name in names:
1824            name = os.path.expandvars(name)
1825            name = os.path.expanduser(name)
1826            if not os.path.exists(name):
1827                msg = "File '%s' does not exist" % (name)
1828                if rcParams['verbose']:
1829                    asaplog.push(msg)
1830                    print asaplog.pop().strip()
1831                    return
1832                raise IOError(msg)
1833            fullnames.append(name)
1834        if average:
1835            asaplog.push('Auto averaging integrations')
1836        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1837        for name in fullnames:
1838            tbl = Scantable(stype)
1839            r = stfiller(tbl)
1840            rx = rcParams['scantable.reference']
1841            r._setreferenceexpr(rx)
1842            msg = "Importing %s..." % (name)
1843            asaplog.push(msg, False)
1844            print_log()
1845            r._open(name, -1, -1)
1846            r._read()
1847            if average:
1848                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1849            if not first:
1850                tbl = self._math._merge([self, tbl])
1851            Scantable.__init__(self, tbl)
1852            r._close()
1853            del r, tbl
1854            first = False
1855        if unit is not None:
1856            self.set_fluxunit(unit)
1857        self.set_freqframe(rcParams['scantable.freqframe'])
1858
1859    def __getitem__(self, key):
1860        if key < 0:
1861            key += self.nrow()
1862        if key >= self.nrow():
1863            raise IndexError("Row index out of range.")
1864        return self._getspectrum(key)
1865
1866    def __setitem__(self, key, value):
1867        if key < 0:
1868            key += self.nrow()
1869        if key >= self.nrow():
1870            raise IndexError("Row index out of range.")
1871        if not hasattr(value, "__len__") or \
1872                len(value) > self.nchan(self.getif(key)):
1873            raise ValueError("Spectrum length doesn't match.")
1874        return self._setspectrum(value, key)
1875
1876    def __len__(self):
1877        return self.nrow()
1878
1879    def __iter__(self):
1880        for i in range(len(self)):
1881            yield self[i]