source: trunk/python/scantable.py @ 1590

Last change on this file since 1590 was 1590, checked in by Malte Marquarding, 15 years ago

Tidy up/factored out printing part of stats to _row_callback

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 69.8 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, print_log_dec
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13    @print_log_dec
14    def __init__(self, filename, average=None, unit=None, parallactify=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32            parallactify: Indicate that the data has been parallactified.
33                          Default is taken from the rc file.
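        Example (an illustrative sketch; 'myobs.rpf' is a placeholder file name):
            s = scantable('myobs.rpf', average=True)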
34        """
35        if average is None:
36            average = rcParams['scantable.autoaverage']
37        if parallactify is None:
38            parallactify = rcParams['scantable.parallactify']
39        varlist = vars()
40        from asap._asap import stmath
41        self._math = stmath()
42        if isinstance(filename, Scantable):
43            Scantable.__init__(self, filename)
44        else:
45            if isinstance(filename, str):# or \
46#                (isinstance(filename, list) or isinstance(filename, tuple)) \
47#                  and isinstance(filename[-1], str):
48                import os.path
49                filename = os.path.expandvars(filename)
50                filename = os.path.expanduser(filename)
51                if not os.path.exists(filename):
52                    s = "File '%s' not found." % (filename)
53                    if rcParams['verbose']:
54                        asaplog.push(s)
55                        #print asaplog.pop().strip()
56                        return
57                    raise IOError(s)
58                if os.path.isdir(filename) \
59                    and not os.path.exists(filename+'/table.f1'):
60                    # crude check if asap table
61                    if os.path.exists(filename+'/table.info'):
62                        ondisk = rcParams['scantable.storage'] == 'disk'
63                        Scantable.__init__(self, filename, ondisk)
64                        if unit is not None:
65                            self.set_fluxunit(unit)
66                        self.set_freqframe(rcParams['scantable.freqframe'])
67                    else:
68                        msg = "The given file '%s' is not a valid " \
69                              "asap table." % (filename)
70                        if rcParams['verbose']:
71                            print msg
72                            return
73                        else:
74                            raise IOError(msg)
75                else:
76                    self._fill([filename], unit, average)
77            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
78                  and isinstance(filename[-1], str):
79                self._fill(filename, unit, average)
80        self.parallactify(parallactify)
81        self._add_history("scantable", varlist)
82
83    @print_log_dec
84    def save(self, name=None, format=None, overwrite=False):
85        """
86        Store the scantable on disk. This can be an asap (aips++) Table,
87        SDFITS or MS2 format.
88        Parameters:
89            name:        the name of the outputfile. For format "ASCII"
90                         this is the root file name (data in 'name'.txt
91                         and header in 'name'_header.txt)
92            format:      an optional file format. Default is ASAP.
93                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
94                                       'SDFITS' (save as SDFITS file)
95                                       'ASCII' (saves as ascii text file)
96                                       'MS2' (saves as an aips++
97                                              MeasurementSet V2)
98                                       'FITS' (save as image FITS - not
99                                               readable by class)
100                                       'CLASS' (save as FITS readable by CLASS)
101            overwrite:   If the file should be overwritten if it exists.
102                         The default False is to return with warning
103                         without writing the output. USE WITH CARE.
104        Example:
105            scan.save('myscan.asap')
106            scan.save('myscan.sdfits', 'SDFITS')
107        """
108        from os import path
109        if format is None: format = rcParams['scantable.save']
110        suffix = '.'+format.lower()
111        if name is None or name == "":
112            name = 'scantable'+suffix
113            msg = "No filename given. Using default name %s..." % name
114            asaplog.push(msg)
115        name = path.expandvars(name)
116        if path.isfile(name) or path.isdir(name):
117            if not overwrite:
118                msg = "File %s exists." % name
119                if rcParams['verbose']:
120                    print msg
121                    return
122                else:
123                    raise IOError(msg)
124        format2 = format.upper()
125        if format2 == 'ASAP':
126            self._save(name)
127        else:
128            from asap._asap import stwriter as stw
129            writer = stw(format2)
130            writer.write(self, name)
131        return
132
133    def copy(self):
134        """
135        Return a copy of this scantable.
136        Note:
137            This makes a full (deep) copy. scan2 = scan1 makes a reference.
138        Parameters:
139            none
140        Example:
141            copiedscan = scan.copy()
142        """
143        sd = scantable(Scantable._copy(self))
144        return sd
145
146    def drop_scan(self, scanid=None):
147        """
148        Return a new scantable where the specified scan number(s) has(have)
149        been dropped.
150        Parameters:
151            scanid:    a (list of) scan number(s)
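        Example (an illustrative sketch):
            # return a new scantable without scans 2 and 4
            newscan = scan.drop_scan([2, 4])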
152        """
153        from asap import _is_sequence_or_number as _is_valid
154        from asap import _to_list
155        from asap import unique
156        if not _is_valid(scanid):
157            if rcParams['verbose']:
158                print "Please specify a scanno to drop from the scantable"
159                return
160            else:
161                raise RuntimeError("No scan given")
162        try:
163            scanid = _to_list(scanid)
164            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
165            for sid in scanid: allscans.remove(sid)
166            if len(allscans) == 0:
167                raise ValueError("Can't remove all scans")
168        except ValueError:
169            if rcParams['verbose']:
170                print "Couldn't find any match."
171                return
172            else: raise
173        try:
174            bsel = self.get_selection()
175            sel = selector()
176            sel.set_scans(allscans)
177            self.set_selection(bsel+sel)
178            scopy = self._copy()
179            self.set_selection(bsel)
180            return scantable(scopy)
181        except RuntimeError:
182            if rcParams['verbose']:
183                print "Couldn't find any match."
184            else:
185                raise
186
187
188    def get_scan(self, scanid=None):
189        """
190        Return a specific scan (by scanno) or collection of scans (by
191        source name) in a new scantable.
192        Note:
193            See scantable.drop_scan() for the inverse operation.
194        Parameters:
195            scanid:    a (list of) scanno or a source name, unix-style
196                       patterns are accepted for source name matching, e.g.
197                       '*_R' gets all 'ref' scans
198        Example:
199            # get all scans containing the source '323p459'
200            newscan = scan.get_scan('323p459')
201            # get all 'off' scans
202            refscans = scan.get_scan('*_R')
203            # get a subset of scans by scanno (as listed in scan.summary())
204            newscan = scan.get_scan([0, 2, 7, 10])
205        """
206        if scanid is None:
207            if rcParams['verbose']:
208                print "Please specify a scan no or name to " \
209                      "retrieve from the scantable"
210                return
211            else:
212                raise RuntimeError("No scan given")
213
214        try:
215            bsel = self.get_selection()
216            sel = selector()
217            if type(scanid) is str:
218                sel.set_name(scanid)
219                self.set_selection(bsel+sel)
220                scopy = self._copy()
221                self.set_selection(bsel)
222                return scantable(scopy)
223            elif type(scanid) is int:
224                sel.set_scans([scanid])
225                self.set_selection(bsel+sel)
226                scopy = self._copy()
227                self.set_selection(bsel)
228                return scantable(scopy)
229            elif type(scanid) is list:
230                sel.set_scans(scanid)
231                self.set_selection(sel)
232                scopy = self._copy()
233                self.set_selection(bsel)
234                return scantable(scopy)
235            else:
236                msg = "Illegal scanid type, use 'int' or 'list' of ints."
237                if rcParams['verbose']:
238                    print msg
239                else:
240                    raise TypeError(msg)
241        except RuntimeError:
242            if rcParams['verbose']: print "Couldn't find any match."
243            else: raise
244
245    def __str__(self):
246        return Scantable._summary(self, True)
247
248    def summary(self, filename=None):
249        """
250        Print a summary of the contents of this scantable.
251        Parameters:
252            filename:    the name of a file to write the output to
253                         Default - no file output
256        """
257        info = Scantable._summary(self, True)
258        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
259        if filename is not None:
260            if filename == "":
261                filename = 'scantable_summary.txt'
262            from os.path import expandvars, isdir
263            filename = expandvars(filename)
264            if not isdir(filename):
265                data = open(filename, 'w')
266                data.write(info)
267                data.close()
268            else:
269                msg = "Illegal file name '%s'." % (filename)
270                if rcParams['verbose']:
271                    print msg
272                else:
273                    raise IOError(msg)
274        if rcParams['verbose']:
275            try:
276                from IPython.genutils import page as pager
277            except ImportError:
278                from pydoc import pager
279            pager(info)
280        else:
281            return info
282
283    def get_spectrum(self, rowno):
284        """Return the spectrum for the given row in the scantable as a list.
285        Parameters:
286             rowno:   the row number to retrieve the spectrum from
287        """
288        return self._getspectrum(rowno)
289
290    def get_mask(self, rowno):
291        """Return the mask for the given row in the scantable as a list.
292        Parameters:
293             rowno:   the row number to retrieve the mask from
294        """
295        return self._getmask(rowno)
296
297    def set_spectrum(self, spec, rowno):
298        """Set the spectrum for the given row in the scantable.
299        Parameters:
300             spec:   the spectrum
301             rowno:    the row number to set the spectrum for
302        """
303        assert(len(spec) == self.nchan())
304        return self._setspectrum(spec, rowno)
305
306    def get_selection(self):
307        """
308        Get the selection object currently set on this scantable.
309        Parameters:
310            none
311        Example:
312            sel = scan.get_selection()
313            sel.set_ifs(0)              # select IF 0
314            scan.set_selection(sel)     # apply modified selection
315        """
316        return selector(self._getselection())
317
318    def set_selection(self, selection=None, **kw):
319        """
320        Select a subset of the data. All following operations on this scantable
321        are only applied to this selection.
322        Parameters:
323            selection:    a selector object (default unset the selection)
324        Examples:
325            sel = selector()         # create a selection object
326            sel.set_scans([0, 3])     # select SCANNO 0 and 3
327            scan.set_selection(sel)  # set the selection
328            scan.summary()           # will only print summary of scanno 0 and 3
329            scan.set_selection()     # unset the selection
330        """
331        if selection is None:
332            # reset
333            if len(kw) == 0:
334                selection = selector()
335            else:
336                # try keywords
337                for k in kw:
338                    if k not in selector.fields:
339                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
340                selection = selector(**kw)
341        self._setselection(selection)
342
343    def stats(self, stat='stddev', mask=None):
344        """
345        Determine the specified statistic of the current beam/if/pol
346        Takes a 'mask' as an optional parameter to specify which
347        channels should be excluded.
348        Parameters:
349            stat:    'min', 'max', 'sumsq', 'sum', 'mean'
350                     'var', 'stddev', 'avdev', 'rms', 'median'
351            mask:    an optional mask specifying where the statistic
352                     should be determined.
353        Example:
354            scan.set_unit('channel')
355            msk = scan.create_mask([100, 200], [500, 600])
356            scan.stats(stat='mean', mask=msk)
357        """
358        if mask is None:
359            mask = []
360        if not self._check_ifs():
361            raise ValueError("Cannot apply mask as the IFs have different "
362                             "number of channels. Please use set_selection() "
363                             "to select individual IFs")
364
365        statvals = self._math._stats(self, mask, stat)
366        def cb(i):
367            return statvals[i]
368
369        return self._row_callback(cb, stat)
370
371    def stddev(self, mask=None):
372        """
373        Determine the standard deviation of the current beam/if/pol
374        Takes a 'mask' as an optional parameter to specify which
375        channels should be excluded.
376        Parameters:
377            mask:    an optional mask specifying where the standard
378                     deviation should be determined.
379
380        Example:
381            scan.set_unit('channel')
382            msk = scan.create_mask([100, 200], [500, 600])
383            scan.stddev(mask=msk)
384        """
385        return self.stats(stat='stddev', mask=mask)
386
387
388    def get_column_names(self):
389        """
390        Return a list of column names, which can be used for selection.
391        """
392        return list(Scantable.get_column_names(self))
393
394    def get_tsys(self):
395        """
396        Return the System temperatures.
397        Returns:
398            a list of Tsys values for the current selection
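        Example (an illustrative sketch):
            tsys = scan.get_tsys()    # one Tsys value per row of the selection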
399        """
400
401        return self._row_callback(self._gettsys, "Tsys")
402
403    def _row_callback(self, callback, label):
404        out = ""
405        outvec = []
406        sep = '-'*50
407        for i in range(self.nrow()):
408            tm = self._gettime(i)
409            src = self._getsourcename(i)
410            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
411            out += 'Time[%s]:\n' % (tm)
412            if self.nbeam(-1) > 1:
413                out +=  ' Beam[%d] ' % (self.getbeam(i))
414            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
415            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
416            outvec.append(callback(i))
417            out += '= %3.3f\n' % (outvec[i])
418            out +=  sep+'\n'
419        if rcParams['verbose']:
420            print sep
421            print " %s" % (label)
422            print sep
423            print out
424        return outvec
425
426    def _get_column(self, callback, row=-1):
427        """
428        """
429        if row == -1:
430            return [callback(i) for i in range(self.nrow())]
431        else:
432            if  0 <= row < self.nrow():
433                return callback(row)
434
435
436    def get_time(self, row=-1, asdatetime=False):
437        """
438        Get a list of time stamps for the observations.
439        Return a string (or datetime object) for each integration time stamp in the scantable.
440        Parameters:
441            row:          row no of integration. Default -1 returns all rows
442            asdatetime:   return values as datetime objects rather than strings
443        Example:
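            # an illustrative sketch; returns python datetime objects, one per row
            times = scan.get_time(asdatetime=True)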
445        """
446        from time import strptime
447        from datetime import datetime
448        times = self._get_column(self._gettime, row)
449        if not asdatetime:
450            return times
451        format = "%Y/%m/%d/%H:%M:%S"
452        if isinstance(times, list):
453            return [datetime(*strptime(i, format)[:6]) for i in times]
454        else:
455            return datetime(*strptime(times, format)[:6])
456
457
458    def get_inttime(self, row=-1):
459        """
460        Get a list of integration times for the observations.
461        Return a time in seconds for each integration in the scantable.
462        Parameters:
463            row:    row no of integration. Default -1 returns all rows.
464        Example:
465            none
466        """
467        return self._get_column(self._getinttime, row)
468
469
470    def get_sourcename(self, row=-1):
471        """
472        Get a list of source names for the observations.
473        Return a string for each integration in the scantable.
474        Parameters:
475            row:    row no of integration. Default -1 returns all rows.
476        Example:
477            none
478        """
479        return self._get_column(self._getsourcename, row)
480
481    def get_elevation(self, row=-1):
482        """
483        Get a list of elevations for the observations.
484        Return a float for each integration in the scantable.
485        Parameters:
486            row:    row no of integration. Default -1 returns all rows.
487        Example:
488            none
489        """
490        return self._get_column(self._getelevation, row)
491
492    def get_azimuth(self, row=-1):
493        """
494        Get a list of azimuths for the observations.
495        Return a float for each integration in the scantable.
496        Parameters:
497            row:    row no of integration. Default -1 returns all rows.
498        Example:
499            none
500        """
501        return self._get_column(self._getazimuth, row)
502
503    def get_parangle(self, row=-1):
504        """
505        Get a list of parallactic angles for the observations.
506        Return a float for each integration in the scantable.
507        Parameters:
508            row:    row no of integration. Default -1 returns all rows.
509        Example:
510            none
511        """
512        return self._get_column(self._getparangle, row)
513
514    def get_direction(self, row=-1):
515        """
516        Get a list of positions on the sky (direction) for the observations.
517        Return a direction for each integration in the scantable.
518        Parameters:
519            row:    row no of integration. Default -1 returns all rows
520        Example:
521            none
522        """
523        return self._get_column(self._getdirection, row)
524
525    def get_directionval(self, row=-1):
526        """
527        Get a list of positions on the sky (direction) for the observations.
528        Return a direction vector for each integration in the scantable.
529        Parameters:
530            row:    row no of integration. Default -1 returns all rows
531        Example:
532            none
533        """
534        return self._get_column(self._getdirectionvec, row)
535
536
537    def set_unit(self, unit='channel'):
538        """
539        Set the unit for all following operations on this scantable
540        Parameters:
541            unit:    optional unit, default is 'channel'
542                     one of '*Hz', 'km/s', 'channel', ''
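        Example (an illustrative sketch):
            scan.set_unit('km/s')    # subsequent operations work in velocity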
543        """
544        varlist = vars()
545        if unit in ['', 'pixel', 'channel']:
546            unit = ''
547        inf = list(self._getcoordinfo())
548        inf[0] = unit
549        self._setcoordinfo(inf)
550        self._add_history("set_unit", varlist)
551
552    @print_log_dec
553    def set_instrument(self, instr):
554        """
555        Set the instrument for subsequent processing.
556        Parameters:
557            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
558                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
559        """
560        self._setInstrument(instr)
561        self._add_history("set_instrument", vars())
562
563    @print_log_dec
564    def set_feedtype(self, feedtype):
565        """
566        Overwrite the feed type, which might not be set correctly.
567        Parameters:
568            feedtype:     'linear' or 'circular'
569        """
570        self._setfeedtype(feedtype)
571        self._add_history("set_feedtype", vars())
572
573    @print_log_dec
574    def set_doppler(self, doppler='RADIO'):
575        """
576        Set the doppler for all following operations on this scantable.
577        Parameters:
578            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
579        """
580        varlist = vars()
581        inf = list(self._getcoordinfo())
582        inf[2] = doppler
583        self._setcoordinfo(inf)
584        self._add_history("set_doppler", varlist)
585
586    @print_log_dec
587    def set_freqframe(self, frame=None):
588        """
589        Set the frame type of the Spectral Axis.
590        Parameters:
591            frame:   an optional frame type, default 'LSRK'. Valid frames are:
592                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
593                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
594        Examples:
595            scan.set_freqframe('BARY')
596        """
597        if frame is None: frame = rcParams['scantable.freqframe']
598        varlist = vars()
599        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
600                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
601
602        if frame in valid:
603            inf = list(self._getcoordinfo())
604            inf[1] = frame
605            self._setcoordinfo(inf)
606            self._add_history("set_freqframe", varlist)
607        else:
608            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
609            if rcParams['verbose']:
610                print msg
611            else:
612                raise TypeError(msg)
613
614    def set_dirframe(self, frame=""):
615        """
616        Set the frame type of the Direction on the sky.
617        Parameters:
618            frame:   an optional frame type, default ''. Valid frames are:
619                     'J2000', 'B1950', 'GALACTIC'
620        Examples:
621            scan.set_dirframe('GALACTIC')
622        """
623        varlist = vars()
624        try:
625            Scantable.set_dirframe(self, frame)
626        except RuntimeError, msg:
627            if rcParams['verbose']:
628                print msg
629            else:
630                raise
631        self._add_history("set_dirframe", varlist)
632
633    def get_unit(self):
634        """
635        Get the default unit set in this scantable
636        Returns:
637            A unit string
638        """
639        inf = self._getcoordinfo()
640        unit = inf[0]
641        if unit == '': unit = 'channel'
642        return unit
643
644    def get_abcissa(self, rowno=0):
645        """
646        Get the abcissa in the current coordinate setup for the currently
647        selected Beam/IF/Pol
648        Parameters:
649            rowno:    an optional row number in the scantable. Default is the
650                      first row, i.e. rowno=0
651        Returns:
652            The abcissa values and the format string (as a tuple)
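        Example (an illustrative sketch):
            abc, lbl = scan.get_abcissa(rowno=0)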
653        """
654        abc = self._getabcissa(rowno)
655        lbl = self._getabcissalabel(rowno)
656        return abc, lbl
657
658    def flag(self, mask=None):
659        """
660        Flag the selected data using an optional channel mask.
661        Parameters:
662            mask:   an optional channel mask, created with create_mask. Default
663                    (no mask) is all channels.
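        Example (an illustrative sketch; the channel range is a placeholder):
            msk = scan.create_mask([0, 100])
            scan.flag(mask=msk)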
664        """
665        varlist = vars()
666        if mask is None:
667            mask = []
668        try:
669            self._flag(mask)
670        except RuntimeError, msg:
671            if rcParams['verbose']:
672                print msg
673                return
674            else: raise
675        self._add_history("flag", varlist)
676
677    @print_log_dec
678    def lag_flag(self, start, end, unit="MHz", insitu=None):
679        """
680        Flag the data in 'lag' space by providing a frequency to remove.
681        Flagged data in the scantable gets interpolated over the region.
682        No taper is applied.
683        Parameters:
684            start:    the start frequency (really a period within the
685                      bandwidth)  or period to remove
686            end:      the end frequency or period to remove
687            unit:     the frequency unit (default "MHz") or "" for
688                      explicit lag channels
689        Notes:
690            It is recommended to flag edges of the band or strong
691            signals beforehand.
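        Example (an illustrative sketch; the numbers are placeholders):
            scan.lag_flag(0.21, 0.23, unit="MHz")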
692        """
693        if insitu is None: insitu = rcParams['insitu']
694        self._math._setinsitu(insitu)
695        varlist = vars()
696        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
697        if not (unit == "" or base.has_key(unit)):
698            raise ValueError("%s is not a valid unit." % unit)
699        try:
700            if unit == "":
701                s = scantable(self._math._lag_flag(self, start, end, "lags"))
702            else:
703                s = scantable(self._math._lag_flag(self, start*base[unit],
704                                                   end*base[unit], "frequency"))
705        except RuntimeError, msg:
706            if rcParams['verbose']:
707                print msg
708                return
709            else: raise
710        s._add_history("lag_flag", varlist)
711        if insitu:
712            self._assign(s)
713        else:
714            return s
715
716    @print_log_dec
717    def create_mask(self, *args, **kwargs):
718        """
719        Compute and return a mask based on [min, max] windows.
720        The specified windows are to be INCLUDED, when the mask is
721        applied.
722        Parameters:
723            [min, max], [min2, max2], ...
724                Pairs of start/end points (inclusive) specifying the regions
725                to be masked
726            invert:     optional argument. If specified as True,
727                        return an inverted mask, i.e. the regions
728                        specified are EXCLUDED
729            row:        create the mask using the specified row for
730                        unit conversions, default is row=0
731                        only necessary if frequency varies over rows.
732        Example:
733            scan.set_unit('channel')
734            a)
735            msk = scan.create_mask([400, 500], [800, 900])
736            # masks everything outside 400 and 500
737            # and 800 and 900 in the unit 'channel'
738
739            b)
740            msk = scan.create_mask([400, 500], [800, 900], invert=True)
741            # masks the regions between 400 and 500
742            # and 800 and 900 in the unit 'channel'
743            c)
744            mask only channel 400
745            msk =  scan.create_mask([400])
746        """
747        row = kwargs.get("row", 0)
748        data = self._getabcissa(row)
749        u = self._getcoordinfo()[0]
750        if rcParams['verbose']:
751            if u == "": u = "channel"
752            msg = "The current mask window unit is %s" % u
753            i = self._check_ifs()
754            if not i:
755                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
756            asaplog.push(msg)
757        n = self.nchan()
758        msk = _n_bools(n, False)
759        # test if args is a 'list' or 'normal' *args - UGLY!!!
760
761        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
762             and args or args[0]
763        for window in ws:
764            if len(window) == 1:
765                window = [window[0], window[0]]
766            if len(window) == 0 or  len(window) > 2:
767                raise ValueError("A window needs to be defined as [start(, end)]")
768            if window[0] > window[1]:
769                tmp = window[0]
770                window[0] = window[1]
771                window[1] = tmp
772            for i in range(n):
773                if data[i] >= window[0] and data[i] <= window[1]:
774                    msk[i] = True
775        if kwargs.has_key('invert'):
776            if kwargs.get('invert'):
777                msk = mask_not(msk)
778        return msk
779
780    def get_restfreqs(self):
781        """
782        Get the restfrequency(s) stored in this scantable.
783        The return value(s) are always of unit 'Hz'
784        Parameters:
785            none
786        Returns:
787            a list of doubles
788        """
789        return list(self._getrestfreqs())
790
791
792    def set_restfreqs(self, freqs=None, unit='Hz'):
793        """
794        Set or replace the restfrequency specified.
795        If the 'freqs' argument holds a scalar,
796        then that rest frequency will be applied to all the selected
797        data.  If the 'freqs' argument holds
798        a vector, then it MUST be of equal or smaller length than
799        the number of IFs (and the available restfrequencies will be
800        replaced by this vector).  In this case, *all* data have
801        the restfrequency set per IF according
802        to the corresponding value you give in the 'freqs' vector.
803        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
804        IF 1 gets restfreq 2e9.
805        You can also specify the frequencies via a linecatalog.
806
807        Parameters:
808            freqs:   list of rest frequency values or string identifiers
809            unit:    unit for rest frequency (default 'Hz')
810
811        Example:
812            # set the given restfrequency for the whole table
813            scan.set_restfreqs(freqs=1.4e9)
814            # If the number of IFs in the data is >= 2, IF0 gets the first
815            # value IF1 the second...
816            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
817            #set the given restfrequency for the whole table (by name)
818            scan.set_restfreqs(freqs="OH1667")
819
820        Note:
821            To do more sophisticated rest frequency setting, e.g. on a
822            source and IF basis, use scantable.set_selection() before using
823            this function.
824            # provided your scantable is called scan
825            selection = selector()
826            selection.set_name("ORION*")
827            selection.set_ifs([1])
828            scan.set_selection(selection)
829            scan.set_restfreqs(freqs=86.6e9)
830
831        """
832        varlist = vars()
833        from asap import linecatalog
834        # simple  value
835        if isinstance(freqs, int) or isinstance(freqs, float):
836            self._setrestfreqs(freqs, "",unit)
837        # list of values
838        elif isinstance(freqs, list) or isinstance(freqs, tuple):
839            # list values are scalars
840            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
841                sel = selector()
842                savesel = self._getselection()
843                iflist = self.getifnos()
844                for i in xrange(len(freqs)):
845                    sel.set_ifs(iflist[i])
846                    self._setselection(sel)
847                    self._setrestfreqs(freqs[i], "",unit)
848                self._setselection(savesel)
849            # list values are dicts, e.g. {'value': ..., 'name': ...}
850            elif isinstance(freqs[-1], dict):
851                sel = selector()
852                savesel = self._getselection()
853                iflist = self.getifnos()
854                for i in xrange(len(freqs)):
855                    sel.set_ifs(iflist[i])
856                    self._setselection(sel)
857                    self._setrestfreqs(freqs[i]["value"],
858                                       freqs[i]["name"], "MHz")
859                self._setselection(savesel)
860        # freqs are to be taken from a linecatalog
861        elif isinstance(freqs, linecatalog):
862            sel = selector()
863            savesel = self._getselection()
864            iflist = self.getifnos()
865            for i in xrange(freqs.nrow()):
866                sel.set_ifs(iflist[i])
867                self._setselection(sel)
868                self._setrestfreqs(freqs.get_frequency(i),
869                                   freqs.get_name(i), "MHz")
870                # ensure that we are not iterating past nIF
871                if i == self.nif()-1: break
872            self._setselection(savesel)
873        else:
874            return
875        self._add_history("set_restfreqs", varlist)
876
877    def shift_refpix(self, delta):
878        """
879        Shift the reference pixel of the Spectral Coordinate by an
880        integer amount.
881        Parameters:
882            delta:   the amount to shift by
883        Note:
884            Be careful using this with broadband data.
885        """
886        Scantable.shift(self, delta)
887
888    def history(self, filename=None):
889        """
890        Print the history. Optionally to a file.
891        Parameters:
892            filename:    The name of the file to save the history to.
893        """
894        hist = list(self._gethistory())
895        out = "-"*80
896        for h in hist:
897            if h.startswith("---"):
898                out += "\n"+h
899            else:
900                items = h.split("##")
901                date = items[0]
902                func = items[1]
903                items = items[2:]
904                out += "\n"+date+"\n"
905                out += "Function: %s\n  Parameters:" % (func)
906                for i in items:
907                    s = i.split("=")
908                    out += "\n   %s = %s" % (s[0], s[1])
909                out += "\n"+"-"*80
910        if filename is not None:
911            if filename == "":
912                filename = 'scantable_history.txt'
913            import os
914            filename = os.path.expandvars(os.path.expanduser(filename))
915            if not os.path.isdir(filename):
916                data = open(filename, 'w')
917                data.write(out)
918                data.close()
919            else:
920                msg = "Illegal file name '%s'." % (filename)
921                if rcParams['verbose']:
922                    print msg
923                else:
924                    raise IOError(msg)
925        if rcParams['verbose']:
926            try:
927                from IPython.genutils import page as pager
928            except ImportError:
929                from pydoc import pager
930            pager(out)
931        else:
932            return out
933        return
934    #
935    # Maths business
936    #
937    @print_log_dec
938    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
939        """
940        Return the (time) weighted average of a scan.
941        Note:
942            in channels only - align if necessary
943        Parameters:
944            mask:     an optional mask (only used for 'var' and 'tsys'
945                      weighting)
946            scanav:   True averages each scan separately
947                      False (default) averages all scans together.
948            weight:   Weighting scheme.
949                      'none'     (mean, no weighting)
950                      'var'      (1/var(spec) weighted)
951                      'tsys'     (1/Tsys**2 weighted)
952                      'tint'     (integration time weighted)
953                      'tintsys'  (Tint/Tsys**2)
954                      'median'   (median averaging)
955                      The default is 'tint'
956            align:    align the spectra in velocity before averaging. It takes
957                      the time of the first spectrum as reference time.
958        Example:
959            # time average the scantable without using a mask
960            newscan = scan.average_time()
961        """
962        varlist = vars()
963        if weight is None: weight = 'TINT'
964        if mask is None: mask = ()
965        if scanav: scanav = "SCAN"
966        else: scanav = "NONE"
967        scan = (self, )
968        try:
969            if align:
970                scan = (self.freq_align(insitu=False), )
971            s = None
972            if weight.upper() == 'MEDIAN':
973                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
974                                                         scanav))
975            else:
976                s = scantable(self._math._average(scan, mask, weight.upper(),
977                              scanav))
978        except RuntimeError, msg:
979            if rcParams['verbose']:
980                print msg
981                return
982            else: raise
983        s._add_history("average_time", varlist)
984        return s
985
986    @print_log_dec
987    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
988        """
989        Return a scan where all spectra are converted to either
990        Jansky or Kelvin depending upon the flux units of the scan table.
991        By default the function tries to look the values up internally.
992        If it can't find them (or if you want to over-ride), you must
993        specify EITHER jyperk OR eta (and D which it will try to look up
994        also if you don't set it). jyperk takes precedence if you set both.
995        Parameters:
996            jyperk:      the Jy / K conversion factor
997            eta:         the aperture efficiency
998            d:           the geometric diameter (metres)
999            insitu:      if False a new scantable is returned.
1000                         Otherwise, the scaling is done in-situ
1001                         The default is taken from .asaprc (False)
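        Example (an illustrative sketch; the Jy/K factor is a placeholder):
            converted = scan.convert_flux(jyperk=12.7, insitu=False)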
1002        """
1003        if insitu is None: insitu = rcParams['insitu']
1004        self._math._setinsitu(insitu)
1005        varlist = vars()
1006        if jyperk is None: jyperk = -1.0
1007        if d is None: d = -1.0
1008        if eta is None: eta = -1.0
1009        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1010        s._add_history("convert_flux", varlist)
1011        if insitu: self._assign(s)
1012        else: return s
1013
1014    @print_log_dec
1015    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1016        """
1017        Return a scan after applying a gain-elevation correction.
1018        The correction can be made via either a polynomial or a
1019        table-based interpolation (and extrapolation if necessary).
1020        You specify polynomial coefficients, an ascii table or neither.
1021        If you specify neither, then a polynomial correction will be made
1022        with built in coefficients known for certain telescopes (an error
1023        will occur if the instrument is not known).
1024        The data and Tsys are *divided* by the scaling factors.
1025        Parameters:
1026            poly:        Polynomial coefficients (default None) to compute a
1027                         gain-elevation correction as a function of
1028                         elevation (in degrees).
1029            filename:    The name of an ascii file holding correction factors.
1030                         The first row of the ascii file must give the column
1031                         names and these MUST include columns
1032                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1033                         by this) somewhere.
1034                         The second row must give the data type of the
1035                         column. Use 'R' for Real and 'I' for Integer.
1036                         An example file would be
1037                         (actual factors are arbitrary) :
1038
1039                         TIME ELEVATION FACTOR
1040                         R R R
1041                         0.1 0 0.8
1042                         0.2 20 0.85
1043                         0.3 40 0.9
1044                         0.4 60 0.85
1045                         0.5 80 0.8
1046                         0.6 90 0.75
1047            method:      Interpolation method when correcting from a table.
1048                         Values are  "nearest", "linear" (default), "cubic"
1049                         and "spline"
1050            insitu:      if False a new scantable is returned.
1051                         Otherwise, the scaling is done in-situ
1052                         The default is taken from .asaprc (False)
1053        """
1054
1055        if insitu is None: insitu = rcParams['insitu']
1056        self._math._setinsitu(insitu)
1057        varlist = vars()
1058        if poly is None:
1059            poly = ()
1060        from os.path import expandvars
1061        filename = expandvars(filename)
1062        s = scantable(self._math._gainel(self, poly, filename, method))
1063        s._add_history("gain_el", varlist)
1064        if insitu: self._assign(s)
1065        else: return s
1066
1067    @print_log_dec
1068    def freq_align(self, reftime=None, method='cubic', insitu=None):
1069        """
1070        Return a scan where all rows have been aligned in frequency/velocity.
1071        The alignment frequency frame (e.g. LSRK) is that set by function
1072        set_freqframe.
1073        Parameters:
1074            reftime:     reference time to align at. By default, the time of
1075                         the first row of data is used.
1076            method:      Interpolation method for regridding the spectra.
1077                         Choose from "nearest", "linear", "cubic" (default)
1078                         and "spline"
1079            insitu:      if False a new scantable is returned.
1080                         Otherwise, the scaling is done in-situ
1081                         The default is taken from .asaprc (False)
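        Example (an illustrative sketch):
            aligned = scan.freq_align(insitu=False)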
1082        """
1083        if insitu is None: insitu = rcParams["insitu"]
1084        self._math._setinsitu(insitu)
1085        varlist = vars()
1086        if reftime is None: reftime = ""
1087        s = scantable(self._math._freq_align(self, reftime, method))
1088        s._add_history("freq_align", varlist)
1089        if insitu: self._assign(s)
1090        else: return s
1091
1092    @print_log_dec
1093    def opacity(self, tau, insitu=None):
1094        """
1095        Apply an opacity correction. The data
1096        and Tsys are multiplied by the correction factor.
1097        Parameters:
1098            tau:         Opacity from which the correction factor is
1099                         exp(tau*ZD)
1100                         where ZD is the zenith-distance
1101            insitu:      if False a new scantable is returned.
1102                         Otherwise, the scaling is done in-situ
1103                         The default is taken from .asaprc (False)
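        Example (an illustrative sketch; the opacity value is a placeholder):
            corrected = scan.opacity(tau=0.09, insitu=False)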
1104        """
1105        if insitu is None: insitu = rcParams['insitu']
1106        self._math._setinsitu(insitu)
1107        varlist = vars()
1108        s = scantable(self._math._opacity(self, tau))
1109        s._add_history("opacity", varlist)
1110        if insitu: self._assign(s)
1111        else: return s
1112
1113    @print_log_dec
1114    def bin(self, width=5, insitu=None):
1115        """
1116        Return a scan where all spectra have been binned up.
1117        Parameters:
1118            width:       The bin width (default=5) in pixels
1119            insitu:      if False a new scantable is returned.
1120                         Otherwise, the scaling is done in-situ
1121                         The default is taken from .asaprc (False)
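        Example (an illustrative sketch):
            binned = scan.bin(width=10, insitu=False)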
1122        """
1123        if insitu is None: insitu = rcParams['insitu']
1124        self._math._setinsitu(insitu)
1125        varlist = vars()
1126        s = scantable(self._math._bin(self, width))
1127        s._add_history("bin", varlist)
1128        if insitu:
1129            self._assign(s)
1130        else:
1131            return s
1132
1133    @print_log_dec
1134    def resample(self, width=5, method='cubic', insitu=None):
1135        """
1136        Return a scan where all spectra have been resampled (binned up by interpolation).
1137
1138        Parameters:
1139            width:       The bin width (default=5) in pixels
1140            method:      Interpolation method to use for the resampling.
1141                         Values are  "nearest", "linear", "cubic" (default)
1142                         and "spline"
1143            insitu:      if False a new scantable is returned.
1144                         Otherwise, the scaling is done in-situ
1145                         The default is taken from .asaprc (False)
1146        """
1147        if insitu is None: insitu = rcParams['insitu']
1148        self._math._setinsitu(insitu)
1149        varlist = vars()
1150        s = scantable(self._math._resample(self, method, width))
1151        s._add_history("resample", varlist)
1152        if insitu: self._assign(s)
1153        else: return s
1154
1155    @print_log_dec
1156    def average_pol(self, mask=None, weight='none'):
1157        """
1158        Average the Polarisations together.
1159        Parameters:
1160            mask:        An optional mask defining the region, where the
1161                         averaging will be applied. The output will have all
1162                         specified points masked.
1163            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1164                         weighted), or 'tsys' (1/Tsys**2 weighted)
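        Example (an illustrative sketch):
            polaveraged = scan.average_pol(weight='tsys')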
1165        """
1166        varlist = vars()
1167        if mask is None:
1168            mask = ()
1169        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1170        s._add_history("average_pol", varlist)
1171        return s
1172
1173    @print_log_dec
1174    def average_beam(self, mask=None, weight='none'):
1175        """
1176        Average the Beams together.
1177        Parameters:
1178            mask:        An optional mask defining the region, where the
1179                         averaging will be applied. The output will have all
1180                         specified points masked.
1181            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1182                         weighted), or 'tsys' (1/Tsys**2 weighted)
1183        """
1184        varlist = vars()
1185        if mask is None:
1186            mask = ()
1187        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1188        s._add_history("average_beam", varlist)
1189        return s
1190
1191    def parallactify(self, pflag):
1192        varlist = vars()
1193        self._parallactify(pflag)
1194        self._add_history("parallactify", varlist)
1195
1196    @print_log_dec
1197    def convert_pol(self, poltype=None):
1198        """
1199        Convert the data to a different polarisation type.
1200        Note that you will need cross-polarisation terms for most conversions.
1201        Parameters:
1202            poltype:    The new polarisation type. Valid types are:
1203                        "linear", "circular", "stokes" and "linpol"
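        Example (an illustrative sketch):
            stokescan = scan.convert_pol("stokes")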
1204        """
1205        varlist = vars()
1206        try:
1207            s = scantable(self._math._convertpol(self, poltype))
1208        except RuntimeError, msg:
1209            if rcParams['verbose']:
1210                print msg
1211                return
1212            else:
1213                raise
1214        s._add_history("convert_pol", varlist)
1215        return s
1216
1217    @print_log_dec
1218    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1219        """
1220        Smooth the spectrum by the specified kernel (conserving flux).
1221        Parameters:
1222            kernel:     The type of smoothing kernel. Select from
1223                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1224                        or 'poly'
1225            width:      The width of the kernel in pixels. For hanning this is
1226                        ignored, otherwise it defaults to 5 pixels.
1227                        For 'gaussian' it is the Full Width Half
1228                        Maximum. For 'boxcar' it is the full width.
1229                        For 'rmedian' and 'poly' it is the half width.
1230            order:      Optional parameter for 'poly' kernel (default is 2), to
1231                        specify the order of the polynomial. Ignored by all other
1232                        kernels.
1233            insitu:     if False a new scantable is returned.
1234                        Otherwise, the scaling is done in-situ
1235                        The default is taken from .asaprc (False)
1236        Example:
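            # an illustrative sketch; gaussian smoothing with a 10-channel FWHM
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)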
1238        """
1239        if insitu is None: insitu = rcParams['insitu']
1240        self._math._setinsitu(insitu)
1241        varlist = vars()
1242        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1243        s._add_history("smooth", varlist)
1244        if insitu: self._assign(s)
1245        else: return s
1246
1247    @print_log_dec
1248    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1249        """
1250        Return a scan which has been baselined (all rows) by a polynomial.
1251        Parameters:
1252            mask:       an optional mask
1253            order:      the order of the polynomial (default is 0)
1254            plot:       plot the fit and the residual. In this mode each
1255                        individual fit has to be approved by typing 'y'
1256                        or 'n'.
1257            uselin:     use linear polynomial fit
1258            insitu:     if False a new scantable is returned.
1259                        Otherwise, the scaling is done in-situ
1260                        The default is taken from .asaprc (False)
1261        Example:
1262            # return a scan baselined by a third order polynomial,
1263            # not using a mask
1264            bscan = scan.poly_baseline(order=3)
1265        """
1266        if insitu is None: insitu = rcParams['insitu']
1267        varlist = vars()
1268        if mask is None:
1269            mask = [True for i in xrange(self.nchan(-1))]
1270        from asap.asapfitter import fitter
1271        try:
1272            f = fitter()
1273            f.set_scan(self, mask)
1274            #f.set_function(poly=order)
1275            if uselin:
1276                f.set_function(lpoly=order)
1277            else:
1278                f.set_function(poly=order)
1279            s = f.auto_fit(insitu, plot=plot)
1280            s._add_history("poly_baseline", varlist)
1281            if insitu: self._assign(s)
1282            else: return s
1283        except RuntimeError:
1284            msg = "The fit failed, possibly because it didn't converge."
1285            if rcParams['verbose']:
1286                print msg
1287                return
1288            else:
1289                raise RuntimeError(msg)
1290
1291    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1292                           threshold=3, chan_avg_limit=1, plot=False,
1293                           insitu=None):
1294        """
1295        Return a scan which has been baselined (all rows) by a polynomial.
1296        Spectral lines are detected first using linefinder and masked out
1297        to avoid them affecting the baseline solution.
1298
1299        Parameters:
1300            mask:       an optional mask retrieved from the scantable
1301            edge:       an optional number of channels to drop at
1302                        the edge of spectrum. If only one value is
1303                        specified, the same number will be dropped from
1304                        both sides of the spectrum. Default is to keep
1305                        all channels. Nested tuples represent individual
1306                        edge selection for different IFs (the number of spectral
1307                        channels can differ)
1308            order:      the order of the polynomial (default is 0)
1309            threshold:  the threshold used by line finder. It is better to
1310                        keep it large as only strong lines affect the
1311                        baseline solution.
1312            chan_avg_limit:
1313                        the maximum number of consecutive spectral channels to
1314                        average during the search of weak and broad lines.
1315                        The default is no averaging (and no search for weak
1316                        lines). If such lines can affect the fitted baseline
1317                        (e.g. a high order polynomial is fitted), increase this
1318                        parameter (usually values up to 8 are reasonable). Most
1319                        users of this method should find the default value
1320                        sufficient.
1321            plot:       plot the fit and the residual. In this mode each
1322                        individual fit has to be approved by typing 'y'
1323                        or 'n'.
1324            insitu:     if False a new scantable is returned.
1325                        Otherwise, the scaling is done in-situ
1326                        The default is taken from .asaprc (False)
1327
1328        Example:
1329            scan2=scan.auto_poly_baseline(order=7)
1330        """
1331        if insitu is None: insitu = rcParams['insitu']
1332        varlist = vars()
1333        from asap.asapfitter import fitter
1334        from asap.asaplinefind import linefinder
1335        from asap import _is_sequence_or_number as _is_valid
1336
1337        # check whether edge is set up for each IF individually
1338        individualedge = False
1339        if len(edge) > 1:
1340            if isinstance(edge[0], (list, tuple)):
1341                individualedge = True
1342
1343        if not _is_valid(edge, int) and not individualedge:
1344            raise ValueError("Parameter 'edge' has to be an integer or a pair "
1345                             "of integers specified as a tuple. Nested tuples are "
1346                             "allowed to make individual selection for different IFs.")
1347
1348        curedge = (0, 0)
1349        if individualedge:
1350            for edgepar in edge:
1351                if not _is_valid(edgepar, int):
1352                    raise ValueError("Each element of the 'edge' tuple has "
1353                                     "to be a pair of integers or an integer.")
1354        else:
1355            curedge = edge
1356
1357        # setup fitter
1358        f = fitter()
1359        f.set_function(poly=order)
1360
1361        # setup line finder
1362        fl = linefinder()
1363        fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
1364
1365        if not insitu:
1366            workscan = self.copy()
1367        else:
1368            workscan = self
1369
1370        fl.set_scan(workscan)
1371
1372        rows = range(workscan.nrow())
1373        asaplog.push("Processing:")
1374        for r in rows:
1375            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1376                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1377                 workscan.getpol(r), workscan.getcycle(r))
1378            asaplog.push(msg, False)
1379
1380            # figure out edge parameter
1381            if individualedge:
1382                if len(edge) < workscan.getif(r) + 1:
1383                    raise RuntimeError("Number of edge elements appears to "
1384                                       "be less than the number of IFs")
1385                curedge = edge[workscan.getif(r)]
1386
1387            # find the lines in this row and derive the corresponding mask
1388            fl.find_lines(r, mask, curedge)
1389            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1390                        mask_and(workscan._getmask(r), fl.get_mask()))
1391            f.fit()
1392            x = f.get_parameters()
1393            if plot:
1394                f.plot(residual=True)
1395                x = raw_input("Accept fit ( [y]/n ): ")
1396                if x.upper() == 'N':
1397                    continue
1398            workscan._setspectrum(f.fitter.getresidual(), r)
1399        if plot:
1400            f._p.unmap()
1401            f._p = None
1402        workscan._add_history("auto_poly_baseline", varlist)
1403        if insitu:
1404            self._assign(workscan)
1405        else:
1406            return workscan
1407
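    # Hypothetical usage sketch (assumes an existing scantable 'scan' with two
    # IFs): drop 100 channels from both edges of IF0 and 50/30 channels from
    # IF1, fit a first-order baseline and return a new scantable.
    #
    #     cleaned = scan.auto_poly_baseline(edge=((100, 100), (50, 30)),
    #                                       order=1, insitu=False)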
1408    @print_log_dec
1409    def rotate_linpolphase(self, angle):
1410        """
1411        Rotate the phase of the complex polarization O=Q+iU correlation.
1412        This is always done in situ in the raw data.  So if you call this
1413        function more than once then each call rotates the phase further.
1414        Parameters:
1415            angle:   The angle (degrees) to rotate (add) by.
1416        Examples:
1417            scan.rotate_linpolphase(2.3)
1418        """
1419        varlist = vars()
1420        self._math._rotate_linpolphase(self, angle)
1421        self._add_history("rotate_linpolphase", varlist)
1422        return
1423
1424    @print_log_dec
1425    def rotate_xyphase(self, angle):
1426        """
1427        Rotate the phase of the XY correlation.  This is always done in situ
1428        in the data.  So if you call this function more than once
1429        then each call rotates the phase further.
1430        Parameters:
1431            angle:   The angle (degrees) to rotate (add) by.
1432        Examples:
1433            scan.rotate_xyphase(2.3)
1434        """
1435        varlist = vars()
1436        self._math._rotate_xyphase(self, angle)
1437        self._add_history("rotate_xyphase", varlist)
1438        return
1439
1440    @print_log_dec
1441    def swap_linears(self):
1442        """
1443        Swap the linear polarisations XX and YY, or more generally the
1444        first two polarisations, as this also works for circulars.
1445        """
1446        varlist = vars()
1447        self._math._swap_linears(self)
1448        self._add_history("swap_linears", varlist)
1449        return
1450
1451    @print_log_dec
1452    def invert_phase(self):
1453        """
1454        Invert the phase of the complex polarisation
1455        """
1456        varlist = vars()
1457        self._math._invert_phase(self)
1458        self._add_history("invert_phase", varlist)
1459        return
1460
1461    @print_log_dec
1462    def add(self, offset, insitu=None):
1463        """
1464        Return a scan where all spectra have the offset added
1465        Parameters:
1466            offset:      the offset
1467            insitu:      if False a new scantable is returned.
1468                         Otherwise, the addition is done in-situ
1469                         The default is taken from .asaprc (False)
1470        """
1471        if insitu is None: insitu = rcParams['insitu']
1472        self._math._setinsitu(insitu)
1473        varlist = vars()
1474        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1475        s._add_history("add", varlist)
1476        if insitu:
1477            self._assign(s)
1478        else:
1479            return s
1480
1481    @print_log_dec
1482    def scale(self, factor, tsys=True, insitu=None):
1483        """
1484        Return a scan where all spectra are scaled by the given 'factor'
1485        Parameters:
1486            factor:      the scaling factor
1487            insitu:      if False a new scantable is returned.
1488                         Otherwise, the scaling is done in-situ
1489                         The default is taken from .asaprc (False)
1490            tsys:        if True (default) then apply the operation to Tsys
1491                         as well as the data
1492        """
1493        if insitu is None: insitu = rcParams['insitu']
1494        self._math._setinsitu(insitu)
1495        varlist = vars()
1496        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1497        s._add_history("scale", varlist)
1498        if insitu:
1499            self._assign(s)
1500        else:
1501            return s
1502
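    # Hypothetical usage sketch for add() and scale() above (assumes an
    # existing scantable 'scan'):
    #
    #     shifted = scan.add(5.0, insitu=False)               # constant offset
    #     doubled = scan.scale(2.0, tsys=True, insitu=False)  # scales Tsys too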
1503    def set_sourcetype(self, match, matchtype="pattern",
1504                       sourcetype="reference"):
1505        """
1506        Set the type of the source to be a source or reference scan
1507        using the provided pattern.
1508        Parameters:
1509            match:          a Unix style pattern, regular expression or selector
1510            matchtype:      'pattern' (default) UNIX style pattern or
1511                            'regex' regular expression
1512            sourcetype:     the type of the source to use (source/reference)
1513        """
1514        varlist = vars()
1515        basesel = self.get_selection()
1516        stype = -1
1517        if sourcetype.lower().startswith("r"):
1518            stype = 1
1519        elif sourcetype.lower().startswith("s"):
1520            stype = 0
1521        else:
1522            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1523        if matchtype.lower().startswith("p"):
1524            matchtype = "pattern"
1525        elif matchtype.lower().startswith("r"):
1526            matchtype = "regex"
1527        else:
1528            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1529        sel = selector()
1530        if isinstance(match, selector):
1531            sel = match
1532        else:
1533            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1534        self.set_selection(basesel+sel)
1535        self._setsourcetype(stype)
1536        self.set_selection(basesel)
1537        self._add_history("set_sourcetype", varlist)
1538
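    # Hypothetical usage sketch (assumes an existing scantable 'scan'): mark
    # every scan whose source name ends in '_R' as a reference scan.
    #
    #     scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")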
1539    @print_log_dec
1540    def auto_quotient(self, preserve=True, mode='paired'):
1541        """
1542        This function allows you to build quotients automatically.
1543        It assumes the observation has the same number of
1544        "ons" and "offs"
1545        Parameters:
1546            preserve:       you can preserve (default) the continuum or
1547                            remove it.  The equations used are
1548                            preserve: Output = Toff * (on/off) - Toff
1549                            remove:   Output = Toff * (on/off) - Ton
1550            mode:           the on/off detection mode
1551                            'paired' (default)
1552                            identifies 'off' scans by the
1553                            trailing '_R' (Mopra/Parkes) or
1554                            '_e'/'_w' (Tid) and matches
1555                            on/off pairs from the observing pattern
1556                            'time'
1557                            finds the closest off in time
1558
1559        """
1560        modes = ["time", "paired"]
1561        if mode not in modes:
1562            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1563            raise ValueError(msg)
1564        varlist = vars()
1565        s = None
1566        if mode.lower() == "paired":
1567            basesel = self.get_selection()
1568            sel = selector()+basesel
1569            sel.set_query("SRCTYPE==1")
1570            self.set_selection(sel)
1571            offs = self.copy()
1572            sel.set_query("SRCTYPE==0")
1573            self.set_selection(sel)
1574            ons = self.copy()
1575            s = scantable(self._math._quotient(ons, offs, preserve))
1576            self.set_selection(basesel)
1577        elif mode.lower() == "time":
1578            s = scantable(self._math._auto_quotient(self, mode, preserve))
1579        s._add_history("auto_quotient", varlist)
1580        return s
1581
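    # Hypothetical usage sketch (assumes 'scan' contains matching on/off rows):
    #
    #     q = scan.auto_quotient(preserve=True, mode='paired')
    #     q_time = scan.auto_quotient(mode='time')   # nearest 'off' in time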
1582    @print_log_dec
1583    def mx_quotient(self, mask=None, weight='median', preserve=True):
1584        """
1585        Form a quotient using "off" beams when observing in "MX" mode.
1586        Parameters:
1587            mask:           an optional mask to be used when weight == 'stddev'
1588            weight:         How to average the off beams.  Default is 'median'.
1589            preserve:       you can preserve (default) the continuum or
1590                            remove it.  The equations used are
1591                            preserve: Output = Toff * (on/off) - Toff
1592                            remove:   Output = Toff * (on/off) - Ton
1593        """
1594        if mask is None: mask = ()
1595        varlist = vars()
1596        on = scantable(self._math._mx_extract(self, 'on'))
1597        preoff = scantable(self._math._mx_extract(self, 'off'))
1598        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1599        from asapmath import quotient
1600        q = quotient(on, off, preserve)
1601        q._add_history("mx_quotient", varlist)
1602        return q
1603
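    # Hypothetical usage sketch (assumes 'scan' holds an MX-mode observation):
    #
    #     q = scan.mx_quotient(weight='median', preserve=True)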
1604    @print_log_dec
1605    def freq_switch(self, insitu=None):
1606        """
1607        Apply frequency switching to the data.
1608        Parameters:
1609            insitu:      if False a new scantable is returned.
1610                         Otherwise, the switching is done in-situ
1611                         The default is taken from .asaprc (False)
1612        Example:
1613            none
1614        """
1615        if insitu is None: insitu = rcParams['insitu']
1616        self._math._setinsitu(insitu)
1617        varlist = vars()
1618        s = scantable(self._math._freqswitch(self))
1619        s._add_history("freq_switch", varlist)
1620        if insitu: self._assign(s)
1621        else: return s
1622
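    # Hypothetical usage sketch (assumes 'scan' holds frequency-switched data):
    #
    #     fs = scan.freq_switch(insitu=False)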
1623    @print_log_dec
1624    def recalc_azel(self):
1625        """
1626        Recalculate the azimuth and elevation for each position.
1627        Parameters:
1628            none
1629        Example:
1630        """
1631        varlist = vars()
1632        self._recalcazel()
1633        self._add_history("recalc_azel", varlist)
1634        return
1635
1636    @print_log_dec
1637    def __add__(self, other):
1638        varlist = vars()
1639        s = None
1640        if isinstance(other, scantable):
1641            s = scantable(self._math._binaryop(self, other, "ADD"))
1642        elif isinstance(other, float):
1643            s = scantable(self._math._unaryop(self, other, "ADD", False))
1644        else:
1645            raise TypeError("Other input is not a scantable or float value")
1646        s._add_history("operator +", varlist)
1647        return s
1648
1649    @print_log_dec
1650    def __sub__(self, other):
1651        """
1652        implicit on all axes and on Tsys
1653        """
1654        varlist = vars()
1655        s = None
1656        if isinstance(other, scantable):
1657            s = scantable(self._math._binaryop(self, other, "SUB"))
1658        elif isinstance(other, float):
1659            s = scantable(self._math._unaryop(self, other, "SUB", False))
1660        else:
1661            raise TypeError("Other input is not a scantable or float value")
1662        s._add_history("operator -", varlist)
1663        return s
1664
1665    @print_log_dec
1666    def __mul__(self, other):
1667        """
1668        implicit on all axes and on Tsys
1669        """
1670        varlist = vars()
1671        s = None
1672        if isinstance(other, scantable):
1673            s = scantable(self._math._binaryop(self, other, "MUL"))
1674        elif isinstance(other, float):
1675            s = scantable(self._math._unaryop(self, other, "MUL", False))
1676        else:
1677            raise TypeError("Other input is not a scantable or float value")
1678        s._add_history("operator *", varlist)
1679        return s
1680
1681
1682    @print_log_dec
1683    def __div__(self, other):
1684        """
1685        implicit on all axes and on Tsys
1686        """
1687        varlist = vars()
1688        s = None
1689        if isinstance(other, scantable):
1690            s = scantable(self._math._binaryop(self, other, "DIV"))
1691        elif isinstance(other, float):
1692            if other == 0.0:
1693                raise ZeroDivisionError("Dividing by zero is not recommended")
1694            s = scantable(self._math._unaryop(self, other, "DIV", False))
1695        else:
1696            raise TypeError("Other input is not a scantable or float value")
1697        s._add_history("operator /", varlist)
1698        return s
1699
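    # Hypothetical usage sketch for the arithmetic operators above (assumes
    # two compatible scantables 'sig' and 'ref'):
    #
    #     diff   = sig - ref      # scantable with scantable
    #     scaled = sig * 1.25     # scantable with float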
1700    def get_fit(self, row=0):
1701        """
1702        Print or return the stored fits for a row in the scantable
1703        Parameters:
1704            row:    the row which the fit has been applied to.
1705        """
1706        if row >= self.nrow():
1707            return
1708        from asap.asapfit import asapfit
1709        fit = asapfit(self._getfit(row))
1710        if rcParams['verbose']:
1711            print fit
1712            return
1713        else:
1714            return fit.as_dict()
1715
1716    def flag_nans(self):
1717        """
1718        Utility function to flag NaN values in the scantable.
1719        """
1720        import numpy
1721        basesel = self.get_selection()
1722        for i in range(self.nrow()):
1723            sel = self.get_row_selector(i)
1724            self.set_selection(basesel+sel)
1725            nans = numpy.isnan(self._getspectrum(0))
1726            if numpy.any(nans):
1727                bnans = [bool(v) for v in nans]
1728                self.flag(bnans)
1729        self.set_selection(basesel)
1730
1731    def get_row_selector(self, rowno):
1732        return selector(beams=self.getbeam(rowno),
1733                        ifs=self.getif(rowno),
1734                        pols=self.getpol(rowno),
1735                        scans=self.getscan(rowno),
1736                        cycles=self.getcycle(rowno))
1737
1738    def _add_history(self, funcname, parameters):
1739        if not rcParams['scantable.history']:
1740            return
1741        # create date
1742        sep = "##"
1743        from datetime import datetime
1744        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1745        hist = dstr+sep
1746        hist += funcname + sep
1747        if parameters.has_key('self'): del parameters['self']
1748        for k, v in parameters.iteritems():
1749            if type(v) is dict:
1750                for k2, v2 in v.iteritems():
1751                    hist += k2
1752                    hist += "="
1753                    if isinstance(v2, scantable):
1754                        hist += 'scantable'
1755                    elif k2 == 'mask':
1756                        if isinstance(v2, list) or isinstance(v2, tuple):
1757                            hist += str(self._zip_mask(v2))
1758                        else:
1759                            hist += str(v2)
1760                    else:
1761                        hist += str(v2)
1762            else:
1763                hist += k
1764                hist += "="
1765                if isinstance(v, scantable):
1766                    hist += 'scantable'
1767                elif k == 'mask':
1768                    if isinstance(v, list) or isinstance(v, tuple):
1769                        hist += str(self._zip_mask(v))
1770                    else:
1771                        hist += str(v)
1772                else:
1773                    hist += str(v)
1774            hist += sep
1775        hist = hist[:-2] # remove trailing '##'
1776        self._addhistory(hist)
1777
1778
1779    def _zip_mask(self, mask):
1780        mask = list(mask)
1781        i = 0
1782        segments = []
1783        while mask[i:].count(1):
1784            i += mask[i:].index(1)
1785            if mask[i:].count(0):
1786                j = i + mask[i:].index(0)
1787            else:
1788                j = len(mask)
1789            segments.append([i, j])
1790            i = j
1791        return segments
1792
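    # Worked example for _zip_mask above (illustrative): _zip_mask([0, 1, 1, 0, 1])
    # returns [[1, 3], [4, 5]], i.e. the half-open [start, end) index ranges of
    # the True (selected) channels.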
1793    def _get_ordinate_label(self):
1794        fu = "("+self.get_fluxunit()+")"
1795        import re
1796        lbl = "Intensity"
1797        if re.match(".K.", fu):
1798            lbl = "Brightness Temperature "+ fu
1799        elif re.match(".Jy.", fu):
1800            lbl = "Flux density "+ fu
1801        return lbl
1802
1803    def _check_ifs(self):
1804        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1805        nchans = filter(lambda t: t > 0, nchans)
1806        return (len(set(nchans)) == 1)
1807
1808    def _fill(self, names, unit, average):
1809        import os
1810        from asap._asap import stfiller
1811        first = True
1812        fullnames = []
1813        for name in names:
1814            name = os.path.expandvars(name)
1815            name = os.path.expanduser(name)
1816            if not os.path.exists(name):
1817                msg = "File '%s' does not exist" % (name)
1818                if rcParams['verbose']:
1819                    asaplog.push(msg)
1820                    print asaplog.pop().strip()
1821                    return
1822                raise IOError(msg)
1823            fullnames.append(name)
1824        if average:
1825            asaplog.push('Auto averaging integrations')
1826        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1827        for name in fullnames:
1828            tbl = Scantable(stype)
1829            r = stfiller(tbl)
1830            rx = rcParams['scantable.reference']
1831            r._setreferenceexpr(rx)
1832            msg = "Importing %s..." % (name)
1833            asaplog.push(msg, False)
1834            print_log()
1835            r._open(name, -1, -1)
1836            r._read()
1837            if average:
1838                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1839            if not first:
1840                tbl = self._math._merge([self, tbl])
1841            Scantable.__init__(self, tbl)
1842            r._close()
1843            del r, tbl
1844            first = False
1845        if unit is not None:
1846            self.set_fluxunit(unit)
1847        self.set_freqframe(rcParams['scantable.freqframe'])
1848
1849    def __getitem__(self, key):
1850        if key < 0:
1851            key += self.nrow()
1852        if key >= self.nrow():
1853            raise IndexError("Row index out of range.")
1854        return self._getspectrum(key)
1855
1856    def __setitem__(self, key, value):
1857        if key < 0:
1858            key += self.nrow()
1859        if key >= self.nrow():
1860            raise IndexError("Row index out of range.")
1861        if not hasattr(value, "__len__") or \
1862                len(value) != self.nchan(self.getif(key)):
1863            raise ValueError("Spectrum length doesn't match.")
1864        return self._setspectrum(value, key)
1865
1866    def __len__(self):
1867        return self.nrow()
1868
1869    def __iter__(self):
1870        for i in range(len(self)):
1871            yield self[i]
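    # Hypothetical usage sketch for the sequence protocol above (assumes an
    # existing scantable 'scan'):
    #
    #     first = scan[0]                                  # spectrum of row 0
    #     scan[0] = [0.0] * scan.nchan(scan.getif(0))      # overwrite row 0
    #     for spec in scan:                                # iterate over rows
    #         print len(spec)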