source: trunk/python/scantable.py @ 1931

Last change on this file since 1931 was 1931, checked in by WataruKawasaki, 14 years ago

New Development: No

JIRA Issue:

Ready for Test: Yes

Interface Changes: No

What Interface Changed:

Test Programs:

Put in Release Notes: No

Module(s): sdbaseline

Description: a new version of poly_baseline() by Malte


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.2 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which can make loading
110                          the MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection. integer (id) or string (name or id).
114
115            parallactify: Indicate that the data had been parallactified.
116                          The default is taken from the .asaprc file.
117
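        Example (a usage sketch; 'orion.asap', 'orion.ms' and the
        antenna name are illustrative only)::

            # open an existing asap table on disk
            scan = scantable('orion.asap')
            # fill from a MeasurementSet, selecting a single antenna
            # and skipping the pointing data
            scan = scantable('orion.ms', average=True,
                             antenna='DV01', getpt=False)
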
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the outputfile. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by class)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   whether to overwrite the file if it exists.
209                         The default (False) is to return with a warning
210                         without writing the output. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable where the specified scan number(s) has(have)
257        been dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
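        Example (a minimal sketch, assuming 'scan' contains the given
        scan numbers)::

            # drop scan 1 only
            newscan = scan.drop_scan(1)
            # drop scans 0 and 2 in one go
            newscan = scan.drop_scan([0, 2])
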
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to.
340                         Default - no file output
341
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename == "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for a given row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for a given row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for a given row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
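        Example (a usage sketch; 'scan' is assumed to be an existing
        scantable)::

            # read the spectrum of row 0, scale it and write it back
            spec = scan.get_spectrum(0)
            spec = [2.0 * v for v in spec]
            scan.set_spectrum(spec, 0)
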
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate does contain the original frequency set-up
399              NOT the new frame. The conversions however are done using the user
400              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])     # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
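        Example (a minimal sketch, assuming the scantable has at least
        six rows)::

            # return row 5 as a new single-row scantable, leaving
            # 'scan' itself untouched
            row5 = scan.get_row(5, insitu=False)
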
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol.
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example:
520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use set_selection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row is None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row is None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. chan=0
619
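        Example (a usage sketch; 'scan' is assumed to be an existing
        scantable with at least 101 channels)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit']   # abcissa value at channel 100
            print qy['value'], qy['unit']   # spectral value at channel 100
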
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol.
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=msk)
644
645        """
646        return self.stats(stat='stddev', mask=mask)
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
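        Example (a minimal sketch)::

            tsys_all = scan.get_tsys()     # Tsys values for all rows
            tsys_row0 = scan.get_tsys(0)   # Tsys value for row 0 only
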
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
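        Example (a usage sketch)::

            w = scan.get_weather(0)            # dict for row 0
            print w['temperature'], w['pressure']
            wlist = scan.get_weather()         # list of dicts, all rows
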
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1):
728        """
729        """
730        if row == -1:
731            return [callback(i) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row)
735
736
737    def get_time(self, row=-1, asdatetime=False):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object for each integration time stamp in the scantable.
741
742        Parameters:
743
744            row:          row no of integration. Default -1 return all rows
745
746            asdatetime:   return values as datetime objects rather than strings
747
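        Example (a minimal sketch)::

            stamps = scan.get_time()                 # strings, all rows
            t0 = scan.get_time(0, asdatetime=True)   # datetime for row 0
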
748        """
749        from time import strptime
750        from datetime import datetime
751        times = self._get_column(self._gettime, row)
752        if not asdatetime:
753            return times
754        format = "%Y/%m/%d/%H:%M:%S"
755        if isinstance(times, list):
756            return [datetime(*strptime(i, format)[:6]) for i in times]
757        else:
758            return datetime(*strptime(times, format)[:6])
759
760
761    def get_inttime(self, row=-1):
762        """\
763        Get a list of integration times for the observations.
764        Return a time in seconds for each integration in the scantable.
765
766        Parameters:
767
768            row:    row no of integration. Default -1 return all rows.
769
770        """
771        return self._get_column(self._getinttime, row)
772
773
774    def get_sourcename(self, row=-1):
775        """\
776        Get a list of source names for the observations.
777        Return a string for each integration in the scantable.
778        Parameters:
779
780            row:    row no of integration. Default -1 return all rows.
781
782        """
783        return self._get_column(self._getsourcename, row)
784
785    def get_elevation(self, row=-1):
786        """\
787        Get a list of elevations for the observations.
788        Return a float for each integration in the scantable.
789
790        Parameters:
791
792            row:    row no of integration. Default -1 return all rows.
793
794        """
795        return self._get_column(self._getelevation, row)
796
797    def get_azimuth(self, row=-1):
798        """\
799        Get a list of azimuths for the observations.
800        Return a float for each integration in the scantable.
801
802        Parameters:
803            row:    row no of integration. Default -1 return all rows.
804
805        """
806        return self._get_column(self._getazimuth, row)
807
808    def get_parangle(self, row=-1):
809        """\
810        Get a list of parallactic angles for the observations.
811        Return a float for each integration in the scantable.
812
813        Parameters:
814
815            row:    row no of integration. Default -1 return all rows.
816
817        """
818        return self._get_column(self._getparangle, row)
819
820    def get_direction(self, row=-1):
821        """
822        Get a list of Positions on the sky (direction) for the observations.
823        Return a string for each integration in the scantable.
824
825        Parameters:
826
827            row:    row no of integration. Default -1 return all rows
828
829        """
830        return self._get_column(self._getdirection, row)
831
832    def get_directionval(self, row=-1):
833        """\
834        Get a list of Positions on the sky (direction) for the observations.
835        Return a float for each integration in the scantable.
836
837        Parameters:
838
839            row:    row no of integration. Default -1 return all rows
840
841        """
842        return self._get_column(self._getdirectionvec, row)
843
844    @asaplog_post_dec
845    def set_unit(self, unit='channel'):
846        """\
847        Set the unit for all following operations on this scantable
848
849        Parameters:
850
851            unit:    optional unit, default is 'channel'. Use one of '*Hz',
852                     'km/s', 'channel' or equivalent ''
853
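        Example (a minimal sketch)::

            scan.set_unit('km/s')    # work in velocity
            scan.set_unit('GHz')     # or in frequency
            scan.set_unit()          # back to channels
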
854        """
855        varlist = vars()
856        if unit in ['', 'pixel', 'channel']:
857            unit = ''
858        inf = list(self._getcoordinfo())
859        inf[0] = unit
860        self._setcoordinfo(inf)
861        self._add_history("set_unit", varlist)
862
863    @asaplog_post_dec
864    def set_instrument(self, instr):
865        """\
866        Set the instrument for subsequent processing.
867
868        Parameters:
869
870            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
871                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
872
873        """
874        self._setInstrument(instr)
875        self._add_history("set_instrument", vars())
876
877    @asaplog_post_dec
878    def set_feedtype(self, feedtype):
879        """\
880        Overwrite the feed type, which might not be set correctly.
881
882        Parameters:
883
884            feedtype:     'linear' or 'circular'
885
886        """
887        self._setfeedtype(feedtype)
888        self._add_history("set_feedtype", vars())
889
890    @asaplog_post_dec
891    def set_doppler(self, doppler='RADIO'):
892        """\
893        Set the doppler for all following operations on this scantable.
894
895        Parameters:
896
897            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
898
899        """
900        varlist = vars()
901        inf = list(self._getcoordinfo())
902        inf[2] = doppler
903        self._setcoordinfo(inf)
904        self._add_history("set_doppler", vars())
905
906    @asaplog_post_dec
907    def set_freqframe(self, frame=None):
908        """\
909        Set the frame type of the Spectral Axis.
910
911        Parameters:
912
913            frame:   an optional frame type, default 'LSRK'. Valid frames are:
914                     'TOPO', 'LSRD', 'LSRK', 'BARY',
915                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
916
917        Example::
918
919            scan.set_freqframe('BARY')
920
921        """
922        frame = frame or rcParams['scantable.freqframe']
923        varlist = vars()
924        # "REST" is not implemented in casacore
925        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
926        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
927        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
928                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
929
930        if frame in valid:
931            inf = list(self._getcoordinfo())
932            inf[1] = frame
933            self._setcoordinfo(inf)
934            self._add_history("set_freqframe", varlist)
935        else:
936            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
937            raise TypeError(msg)
938
939    @asaplog_post_dec
940    def set_dirframe(self, frame=""):
941        """\
942        Set the frame type of the Direction on the sky.
943
944        Parameters:
945
946            frame:   an optional frame type, default ''. Valid frames are:
947                     'J2000', 'B1950', 'GALACTIC'
948
949        Example::
950
951            scan.set_dirframe('GALACTIC')
952
953        """
954        varlist = vars()
955        Scantable.set_dirframe(self, frame)
956        self._add_history("set_dirframe", varlist)
957
958    def get_unit(self):
959        """\
960        Get the default unit set in this scantable
961
962        Returns:
963
964            A unit string
965
966        """
967        inf = self._getcoordinfo()
968        unit = inf[0]
969        if unit == '': unit = 'channel'
970        return unit
971
972    @asaplog_post_dec
973    def get_abcissa(self, rowno=0):
974        """\
975        Get the abcissa in the current coordinate setup for the currently
976        selected Beam/IF/Pol
977
978        Parameters:
979
980            rowno:    an optional row number in the scantable. Default is the
981                      first row, i.e. rowno=0
982
983        Returns:
984
985            The abcissa values and the format string (as a dictionary)
986
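        Example (a usage sketch)::

            abc, lbl = scan.get_abcissa(rowno=0)
            print lbl                 # the axis label/format string
            print abc[0], abc[-1]     # first and last abcissa values
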
987        """
988        abc = self._getabcissa(rowno)
989        lbl = self._getabcissalabel(rowno)
990        return abc, lbl
991
992    @asaplog_post_dec
993    def flag(self, mask=None, unflag=False):
994        """\
995        Flag the selected data using an optional channel mask.
996
997        Parameters:
998
999            mask:   an optional channel mask, created with create_mask. Default
1000                    (no mask) is all channels.
1001
1002            unflag:    if True, unflag the data
1003
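        Example (a usage sketch, using a channel mask built with
        create_mask)::

            msk = scan.create_mask([0, 100], [900, 1023])
            scan.flag(mask=msk)                # flag using the mask
            scan.flag(mask=msk, unflag=True)   # revert the flagging
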
1004        """
1005        varlist = vars()
1006        mask = mask or []
1007        self._flag(mask, unflag)
1008        self._add_history("flag", varlist)
1009
1010    @asaplog_post_dec
1011    def flag_row(self, rows=[], unflag=False):
1012        """\
1013        Flag the selected data in row-based manner.
1014
1015        Parameters:
1016
1017            rows:   list of row numbers to be flagged. Default is no row
1018                    (must be explicitly specified to execute row-based flagging).
1019
1020            unflag: if True, unflag the data.
1021
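        Example (a minimal sketch)::

            scan.flag_row([0, 3, 7])                # flag rows 0, 3 and 7
            scan.flag_row([0, 3, 7], unflag=True)   # and unflag them again
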
1022        """
1023        varlist = vars()
1024        self._flag_row(rows, unflag)
1025        self._add_history("flag_row", varlist)
1026
1027    @asaplog_post_dec
1028    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1029        """\
1030        Flag the selected data outside a specified range (channel-based).
1031
1032        Parameters:
1033
1034            uthres:      upper threshold.
1035
1036            dthres:      lower threshold
1037
1038            clipoutside: True for flagging data outside the range [dthres:uthres].
1039                         False for flagging data inside the range.
1040
1041            unflag:      if True, unflag the data.
1042
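        Example (a usage sketch; the thresholds are illustrative)::

            # flag everything outside the range [-2.0, 5.0]
            scan.clip(uthres=5.0, dthres=-2.0)
            # flag everything inside that range instead
            scan.clip(uthres=5.0, dthres=-2.0, clipoutside=False)
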
1043        """
1044        varlist = vars()
1045        self._clip(uthres, dthres, clipoutside, unflag)
1046        self._add_history("clip", varlist)
1047
1048    @asaplog_post_dec
1049    def lag_flag(self, start, end, unit="MHz", insitu=None):
1050        """\
1051        Flag the data in 'lag' space by providing a frequency to remove.
1052        Flagged data in the scantable gets interpolated over the region.
1053        No taper is applied.
1054
1055        Parameters:
1056
1057            start:    the start frequency (really a period within the
1058                      bandwidth)  or period to remove
1059
1060            end:      the end frequency or period to remove
1061
1062            unit:     the frequency unit (default "MHz") or "" for
1063                      explicit lag channels
1064
1065        *Notes*:
1066
1067            It is recommended to flag edges of the band or strong
1068            signals beforehand.
1069
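        Example (a usage sketch; the frequency range is illustrative)::

            # interpolate over the feature corresponding to 0.5-1.5 MHz,
            # returning a new scantable
            cleaned = scan.lag_flag(0.5, 1.5, unit="MHz", insitu=False)
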
1070        """
1071        if insitu is None: insitu = rcParams['insitu']
1072        self._math._setinsitu(insitu)
1073        varlist = vars()
1074        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1075        if not (unit == "" or base.has_key(unit)):
1076            raise ValueError("%s is not a valid unit." % unit)
1077        if unit == "":
1078            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1079        else:
1080            s = scantable(self._math._lag_flag(self, start*base[unit],
1081                                               end*base[unit], "frequency"))
1082        s._add_history("lag_flag", varlist)
1083        if insitu:
1084            self._assign(s)
1085        else:
1086            return s
1087
1088    @asaplog_post_dec
1089    def create_mask(self, *args, **kwargs):
1090        """\
1091        Compute and return a mask based on [min, max] windows.
1092        The specified windows are to be INCLUDED, when the mask is
1093        applied.
1094
1095        Parameters:
1096
1097            [min, max], [min2, max2], ...
1098                Pairs of start/end points (inclusive) specifying the regions
1099                to be masked
1100
1101            invert:     optional argument. If specified as True,
1102                        return an inverted mask, i.e. the regions
1103                        specified are EXCLUDED
1104
1105            row:        create the mask using the specified row for
1106                        unit conversions, default is row=0.
1107                        Only necessary if frequency varies over rows.
1108
1109        Examples::
1110
1111            scan.set_unit('channel')
1112            # a)
1113            msk = scan.create_mask([400, 500], [800, 900])
1114            # masks everything outside 400 and 500
1115            # and 800 and 900 in the unit 'channel'
1116
1117            # b)
1118            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1119            # masks the regions between 400 and 500
1120            # and 800 and 900 in the unit 'channel'
1121
1122            # c)
1123            #mask only channel 400
1124            msk =  scan.create_mask([400])
1125
1126        """
1127        row = kwargs.get("row", 0)
1128        data = self._getabcissa(row)
1129        u = self._getcoordinfo()[0]
1130        if u == "":
1131            u = "channel"
1132        msg = "The current mask window unit is %s" % u
1133        i = self._check_ifs()
1134        if not i:
1135            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1136        asaplog.push(msg)
1137        n = self.nchan()
1138        msk = _n_bools(n, False)
1139        # test if args is a 'list' or a 'normal' *args - UGLY!!!
1140
1141        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1142             and args or args[0]
1143        for window in ws:
1144            if len(window) == 1:
1145                window = [window[0], window[0]]
1146            if len(window) == 0 or  len(window) > 2:
1147                raise ValueError("A window needs to be defined as [start(, end)]")
1148            if window[0] > window[1]:
1149                tmp = window[0]
1150                window[0] = window[1]
1151                window[1] = tmp
1152            for i in range(n):
1153                if data[i] >= window[0] and data[i] <= window[1]:
1154                    msk[i] = True
1155        if kwargs.has_key('invert'):
1156            if kwargs.get('invert'):
1157                msk = mask_not(msk)
1158        return msk
1159
1160    def get_masklist(self, mask=None, row=0, silent=False):
1161        """\
1162        Compute and return a list of mask windows, [min, max].
1163
1164        Parameters:
1165
1166            mask:       channel mask, created with create_mask.
1167
1168            row:        calculate the masklist using the specified row
1169                        for unit conversions, default is row=0.
1170                        Only necessary if frequency varies over rows.
1171
1172        Returns:
1173
1174            [min, max], [min2, max2], ...
1175                Pairs of start/end points (inclusive) specifying
1176                the masked regions
1177
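        Example (a usage sketch)::

            msk = scan.create_mask([100, 200], [500, 600])
            wins = scan.get_masklist(msk)
            # wins is e.g. [[100.0, 200.0], [500.0, 600.0]] in the
            # current unit
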
1178        """
1179        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1180            raise TypeError("The mask should be list or tuple.")
1181        if len(mask) < 2:
1182            raise TypeError("The mask should contain more than 1 element.")
1183        if self.nchan() != len(mask):
1184            msg = "Number of channels in scantable != number of mask elements"
1185            raise TypeError(msg)
1186        data = self._getabcissa(row)
1187        u = self._getcoordinfo()[0]
1188        if u == "":
1189            u = "channel"
1190        msg = "The current mask window unit is %s" % u
1191        i = self._check_ifs()
1192        if not i:
1193            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1194        if not silent:
1195            asaplog.push(msg)
1196        masklist=[]
1197        ist, ien = None, None
1198        ist, ien=self.get_mask_indices(mask)
1199        if ist is not None and ien is not None:
1200            for i in xrange(len(ist)):
1201                range=[data[ist[i]],data[ien[i]]]
1202                range.sort()
1203                masklist.append([range[0],range[1]])
1204        return masklist
1205
1206    def get_mask_indices(self, mask=None):
1207        """\
1208        Compute and Return lists of mask start indices and mask end indices.
1209
1210        Parameters:
1211
1212            mask:       channel mask, created with create_mask.
1213
1214        Returns:
1215
1216            List of mask start indices and that of mask end indices,
1217            i.e., [istart1,istart2,....], [iend1,iend2,....].
1218
1219        """
1220        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1221            raise TypeError("The mask should be list or tuple.")
1222        if len(mask) < 2:
1223            raise TypeError("The mask should contain more than 1 element.")
1224        istart=[]
1225        iend=[]
1226        if mask[0]: istart.append(0)
1227        for i in range(len(mask)-1):
1228            if not mask[i] and mask[i+1]:
1229                istart.append(i+1)
1230            elif mask[i] and not mask[i+1]:
1231                iend.append(i)
1232        if mask[len(mask)-1]: iend.append(len(mask)-1)
1233        if len(istart) != len(iend):
1234            raise RuntimeError("Numbers of mask start != mask end.")
1235        for i in range(len(istart)):
1236            if istart[i] > iend[i]:
1237                raise RuntimeError("Mask start index > mask end index")
1238                break
1239        return istart,iend
1240
1241#    def get_restfreqs(self):
1242#        """
1243#        Get the restfrequency(s) stored in this scantable.
1244#        The return value(s) are always of unit 'Hz'
1245#        Parameters:
1246#            none
1247#        Returns:
1248#            a list of doubles
1249#        """
1250#        return list(self._getrestfreqs())
1251
1252    def get_restfreqs(self, ids=None):
1253        """\
1254        Get the restfrequency(s) stored in this scantable.
1255        The return value(s) are always of unit 'Hz'
1256
1257        Parameters:
1258
1259            ids: (optional) a list of MOLECULE_IDs for which the
1260                 restfrequency(s) are to be retrieved
1261
1262        Returns:
1263
1264            dictionary containing ids and a list of doubles for each id
1265
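        Example (a minimal sketch)::

            rfdict = scan.get_restfreqs()   # {molecule id: [rest freqs in Hz]}
            rf0 = scan.get_restfreqs(0)     # rest freqs for molecule id 0 only
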
1266        """
1267        if ids is None:
1268            rfreqs={}
1269            idlist = self.getmolnos()
1270            for i in idlist:
1271                rfreqs[i]=list(self._getrestfreqs(i))
1272            return rfreqs
1273        else:
1274            if type(ids)==list or type(ids)==tuple:
1275                rfreqs={}
1276                for i in ids:
1277                    rfreqs[i]=list(self._getrestfreqs(i))
1278                return rfreqs
1279            else:
1280                return list(self._getrestfreqs(ids))
1281            #return list(self._getrestfreqs(ids))
1282
1283    def set_restfreqs(self, freqs=None, unit='Hz'):
1284        """\
1285        Set or replace the restfrequency(s) specified.
1286        If the 'freqs' argument holds a scalar,
1287        then that rest frequency will be applied to all the selected
1288        data.  If the 'freqs' argument holds
1289        a vector, then it MUST be of equal or smaller length than
1290        the number of IFs (and the available restfrequencies will be
1291        replaced by this vector).  In this case, *all* data have
1292        the restfrequency set per IF according
1293        to the corresponding value you give in the 'freqs' vector.
1294        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1295        IF 1 gets restfreq 2e9.
1296
1297        You can also specify the frequencies via a linecatalog.
1298
1299        Parameters:
1300
1301            freqs:   list of rest frequency values or string identifiers
1302
1303            unit:    unit for rest frequency (default 'Hz')
1304
1305
1306        Example::
1307
1308            # set the given restfrequency for the all currently selected IFs
1309            scan.set_restfreqs(freqs=1.4e9)
1310            # set restfrequencies for the n IFs  (n > 1) in the order of the
1311            # list, i.e.
1312            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1313            # len(list_of_restfreqs) == nIF
1314            # for nIF == 1 the following will set multiple restfrequency for
1315            # that IF
1316            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1317            # set multiple restfrequencies per IF, as a list of lists where
1318            # the outer list has nIF elements and the inner lists are arbitrary
1319            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1320
1321       *Note*:
1322
1323            To do more sophisticated rest-frequency setting, e.g. on a
1324            source and IF basis, use scantable.set_selection() before using
1325            this function::
1326
1327                # provided your scantable is called scan
1328                selection = selector()
1329                selection.set_name("ORION*")
1330                selection.set_ifs([1])
1331                scan.set_selection(selection)
1332                scan.set_restfreqs(freqs=86.6e9)
1333
1334        """
1335        varlist = vars()
1336        from asap import linecatalog
1337        # simple  value
1338        if isinstance(freqs, int) or isinstance(freqs, float):
1339            self._setrestfreqs([freqs], [""], unit)
1340        # list of values
1341        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1342            # list values are scalars
1343            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1344                if len(freqs) == 1:
1345                    self._setrestfreqs(freqs, [""], unit)
1346                else:
1347                    # allow the 'old' mode of setting multiple IFs
1348                    sel = selector()
1349                    savesel = self._getselection()
1350                    iflist = self.getifnos()
1351                    if len(freqs)>len(iflist):
1352                        raise ValueError("number of elements in the list "
1353                                         "exceeds the current IF selections")
1354                    iflist = self.getifnos()
1355                    for i, fval in enumerate(freqs):
1356                        sel.set_ifs(iflist[i])
1357                        self._setselection(sel)
1358                        self._setrestfreqs([fval], [""], unit)
1359                    self._setselection(savesel)
1360
1361            # list values are dict, {'value'=, 'name'=)
1362            elif isinstance(freqs[-1], dict):
1363                values = []
1364                names = []
1365                for d in freqs:
1366                    values.append(d["value"])
1367                    names.append(d["name"])
1368                self._setrestfreqs(values, names, unit)
1369            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1370                sel = selector()
1371                savesel = self._getselection()
1372                iflist = self.getifnos()
1373                if len(freqs)>len(iflist):
1374                    raise ValueError("number of elements in list of list exceeds"
1375                                     " the current IF selections")
1376                for i, fval in enumerate(freqs):
1377                    sel.set_ifs(iflist[i])
1378                    self._setselection(sel)
1379                    self._setrestfreqs(fval, [""], unit)
1380                self._setselection(savesel)
1381        # freqs are to be taken from a linecatalog
1382        elif isinstance(freqs, linecatalog):
1383            sel = selector()
1384            savesel = self._getselection()
1385            for i in xrange(freqs.nrow()):
1386                sel.set_ifs(self.getifnos()[i])
1387                self._setselection(sel)
1388                self._setrestfreqs([freqs.get_frequency(i)],
1389                                   [freqs.get_name(i)], "MHz")
1390                # ensure that we are not iterating past nIF
1391                if i == self.nif()-1: break
1392            self._setselection(savesel)
1393        else:
1394            return
1395        self._add_history("set_restfreqs", varlist)
1396
1397    def shift_refpix(self, delta):
1398        """\
1399        Shift the reference pixel of the Spectral Coordinate by an
1400        integer amount.
1401
1402        Parameters:
1403
1404            delta:   the amount to shift by
1405
1406        *Note*:
1407
1408            Be careful using this with broadband data.
1409
1410        """
1411        Scantable.shift_refpix(self, delta)
1412
1413    @asaplog_post_dec
1414    def history(self, filename=None):
1415        """\
1416        Print the history. Optionally to a file.
1417
1418        Parameters:
1419
1420            filename:    The name of the file to save the history to.
1421
1422        """
1423        hist = list(self._gethistory())
1424        out = "-"*80
1425        for h in hist:
1426            if h.startswith("---"):
1427                out = "\n".join([out, h])
1428            else:
1429                items = h.split("##")
1430                date = items[0]
1431                func = items[1]
1432                items = items[2:]
1433                out += "\n"+date+"\n"
1434                out += "Function: %s\n  Parameters:" % (func)
1435                for i in items:
1436                    s = i.split("=")
1437                    out += "\n   %s = %s" % (s[0], s[1])
1438                out = "\n".join([out, "-"*80])
1439        if filename is not None:
1440            if filename == "":
1441                filename = 'scantable_history.txt'
1442            import os
1443            filename = os.path.expandvars(os.path.expanduser(filename))
1444            if not os.path.isdir(filename):
1445                data = open(filename, 'w')
1446                data.write(out)
1447                data.close()
1448            else:
1449                msg = "Illegal file name '%s'." % (filename)
1450                raise IOError(msg)
1451        return page(out)
1452    #
1453    # Maths business
1454    #
1455    @asaplog_post_dec
1456    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1457        """\
1458        Return the (time) weighted average of a scan.
1459
1460        *Note*:
1461
1462            in channels only - align if necessary
1463
1464        Parameters:
1465
1466            mask:     an optional mask (only used for 'var' and 'tsys'
1467                      weighting)
1468
1469            scanav:   True averages each scan separately.
1470                      False (default) averages all scans together.
1471
1472            weight:   Weighting scheme.
1473                      'none'     (mean, no weight)
1474                      'var'      (1/var(spec) weighted)
1475                      'tsys'     (1/Tsys**2 weighted)
1476                      'tint'     (integration time weighted)
1477                      'tintsys'  (Tint/Tsys**2)
1478                      'median'   (median averaging)
1479                      The default is 'tint'
1480
1481            align:    align the spectra in velocity before averaging. It takes
1482                      the time of the first spectrum as reference time.
1483
1484        Example::
1485
1486            # time average the scantable without using a mask
1487            newscan = scan.average_time()
1488
1489        """
1490        varlist = vars()
1491        weight = weight or 'TINT'
1492        mask = mask or ()
1493        scanav = (scanav and 'SCAN') or 'NONE'
1494        scan = (self, )
1495
1496        if align:
1497            scan = (self.freq_align(insitu=False), )
1498        s = None
1499        if weight.upper() == 'MEDIAN':
1500            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1501                                                     scanav))
1502        else:
1503            s = scantable(self._math._average(scan, mask, weight.upper(),
1504                          scanav))
1505        s._add_history("average_time", varlist)
1506        return s
1507
1508    @asaplog_post_dec
1509    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1510        """\
1511        Return a scan where all spectra are converted to either
1512        Jansky or Kelvin depending upon the flux units of the scan table.
1513        By default the function tries to look the values up internally.
1514        If it can't find them (or if you want to over-ride), you must
1515        specify EITHER jyperk OR eta (and D which it will try to look up
1516        also if you don't set it). jyperk takes precedence if you set both.
1517
1518        Parameters:
1519
1520            jyperk:      the Jy / K conversion factor
1521
1522            eta:         the aperture efficiency
1523
1524            d:           the geometric diameter (metres)
1525
1526            insitu:      if False a new scantable is returned.
1527                         Otherwise, the scaling is done in-situ
1528                         The default is taken from .asaprc (False)
1529
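        Example (a usage sketch; the Jy/K factor is illustrative only)::

            # convert using an explicit Jy/K factor and return a new
            # scantable
            converted = scan.convert_flux(jyperk=12.7, insitu=False)
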
1530        """
1531        if insitu is None: insitu = rcParams['insitu']
1532        self._math._setinsitu(insitu)
1533        varlist = vars()
1534        jyperk = jyperk or -1.0
1535        d = d or -1.0
1536        eta = eta or -1.0
1537        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1538        s._add_history("convert_flux", varlist)
1539        if insitu: self._assign(s)
1540        else: return s
1541
1542    @asaplog_post_dec
1543    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1544        """\
1545        Return a scan after applying a gain-elevation correction.
1546        The correction can be made via either a polynomial or a
1547        table-based interpolation (and extrapolation if necessary).
1548        You specify polynomial coefficients, an ascii table or neither.
1549        If you specify neither, then a polynomial correction will be made
1550        with built-in coefficients known for certain telescopes (an error
1551        will occur if the instrument is not known).
1552        The data and Tsys are *divided* by the scaling factors.
1553
1554        Parameters:
1555
1556            poly:        Polynomial coefficients (default None) to compute a
1557                         gain-elevation correction as a function of
1558                         elevation (in degrees).
1559
1560            filename:    The name of an ascii file holding correction factors.
1561                         The first row of the ascii file must give the column
1562                         names and these MUST include columns
1563                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1564                         by this) somewhere.
1565                         The second row must give the data type of the
1566                         column. Use 'R' for Real and 'I' for Integer.
1567                         An example file would be
1568                         (actual factors are arbitrary) :
1569
1570                         TIME ELEVATION FACTOR
1571                         R R R
1572                         0.1 0 0.8
1573                         0.2 20 0.85
1574                         0.3 40 0.9
1575                         0.4 60 0.85
1576                         0.5 80 0.8
1577                         0.6 90 0.75
1578
1579            method:      Interpolation method when correcting from a table.
1580                         Values are  "nearest", "linear" (default), "cubic"
1581                         and "spline"
1582
1583            insitu:      if False a new scantable is returned.
1584                         Otherwise, the scaling is done in-situ
1585                         The default is taken from .asaprc (False)
1586
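        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable and 'gainel.txt' is a hypothetical ascii table laid
            # out as described above
            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
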
1587        """
1588
1589        if insitu is None: insitu = rcParams['insitu']
1590        self._math._setinsitu(insitu)
1591        varlist = vars()
1592        poly = poly or ()
1593        from os.path import expandvars
1594        filename = expandvars(filename)
1595        s = scantable(self._math._gainel(self, poly, filename, method))
1596        s._add_history("gain_el", varlist)
1597        if insitu:
1598            self._assign(s)
1599        else:
1600            return s
1601
1602    @asaplog_post_dec
1603    def freq_align(self, reftime=None, method='cubic', insitu=None):
1604        """\
1605        Return a scan where all rows have been aligned in frequency/velocity.
1606        The alignment frequency frame (e.g. LSRK) is that set by function
1607        set_freqframe.
1608
1609        Parameters:
1610
1611            reftime:     reference time to align at. By default, the time of
1612                         the first row of data is used.
1613
1614            method:      Interpolation method for regridding the spectra.
1615                         Choose from "nearest", "linear", "cubic" (default)
1616                         and "spline"
1617
1618            insitu:      if False a new scantable is returned.
1619                         Otherwise, the scaling is done in-situ
1620                         The default is taken from .asaprc (False)
1621
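        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable with the frequency frame already set via set_freqframe
            aligned = scan.freq_align(method='cubic', insitu=False)
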
1622        """
1623        if insitu is None: insitu = rcParams["insitu"]
1624        self._math._setinsitu(insitu)
1625        varlist = vars()
1626        reftime = reftime or ""
1627        s = scantable(self._math._freq_align(self, reftime, method))
1628        s._add_history("freq_align", varlist)
1629        if insitu: self._assign(s)
1630        else: return s
1631
1632    @asaplog_post_dec
1633    def opacity(self, tau=None, insitu=None):
1634        """\
1635        Apply an opacity correction. The data
1636        and Tsys are multiplied by the correction factor.
1637
1638        Parameters:
1639
1640            tau:         (list of) opacity from which the correction factor is
1641                         exp(tau*ZD)
1642                         where ZD is the zenith-distance.
1643                         If a list is provided, it has to be of length nIF,
1644                         nIF*nPol or 1 and in order of IF/POL, e.g.
1645                         [opif0pol0, opif0pol1, opif1pol0 ...]
1646                         if tau is `None` the opacities are determined from a
1647                         model.
1648
1649            insitu:      if False a new scantable is returned.
1650                         Otherwise, the scaling is done in-situ
1651                         The default is taken from .asaprc (False)
1652
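        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable and the zenith opacity value is a placeholder
            corrected = scan.opacity(tau=0.09, insitu=False)
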
1653        """
1654        if insitu is None: insitu = rcParams['insitu']
1655        self._math._setinsitu(insitu)
1656        varlist = vars()
1657        if not hasattr(tau, "__len__"):
1658            tau = [tau]
1659        s = scantable(self._math._opacity(self, tau))
1660        s._add_history("opacity", varlist)
1661        if insitu: self._assign(s)
1662        else: return s
1663
1664    @asaplog_post_dec
1665    def bin(self, width=5, insitu=None):
1666        """\
1667        Return a scan where all spectra have been binned up.
1668
1669        Parameters:
1670
1671            width:       The bin width (default=5) in pixels
1672
1673            insitu:      if False a new scantable is returned.
1674                         Otherwise, the scaling is done in-situ
1675                         The default is taken from .asaprc (False)
1676
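        Example::

            # a minimal usage sketch; bin an existing scantable 'scan' by
            # 10 channels per output channel
            binned = scan.bin(width=10, insitu=False)
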
1677        """
1678        if insitu is None: insitu = rcParams['insitu']
1679        self._math._setinsitu(insitu)
1680        varlist = vars()
1681        s = scantable(self._math._bin(self, width))
1682        s._add_history("bin", varlist)
1683        if insitu:
1684            self._assign(s)
1685        else:
1686            return s
1687
1688    @asaplog_post_dec
1689    def resample(self, width=5, method='cubic', insitu=None):
1690        """\
1691        Return a scan where all spectra have been binned up.
1692
1693        Parameters:
1694
1695            width:       The bin width (default=5) in pixels
1696
1697            method:      Interpolation method when correcting from a table.
1698                         Values are  "nearest", "linear", "cubic" (default)
1699                         and "spline"
1700
1701            insitu:      if False a new scantable is returned.
1702                         Otherwise, the scaling is done in-situ
1703                         The default is taken from .asaprc (False)
1704
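        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable and the width value is a placeholder
            resampled = scan.resample(width=4, method='cubic', insitu=False)
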
1705        """
1706        if insitu is None: insitu = rcParams['insitu']
1707        self._math._setinsitu(insitu)
1708        varlist = vars()
1709        s = scantable(self._math._resample(self, method, width))
1710        s._add_history("resample", varlist)
1711        if insitu: self._assign(s)
1712        else: return s
1713
1714    @asaplog_post_dec
1715    def average_pol(self, mask=None, weight='none'):
1716        """\
1717        Average the Polarisations together.
1718
1719        Parameters:
1720
1721            mask:        An optional mask defining the region where the
1722                         averaging will be applied. The output will have all
1723                         specified points masked.
1724
1725            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1726                         weighted), or 'tsys' (1/Tsys**2 weighted)
1727
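        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable holding more than one polarisation
            polav = scan.average_pol(weight='tsys')
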
1728        """
1729        varlist = vars()
1730        mask = mask or ()
1731        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1732        s._add_history("average_pol", varlist)
1733        return s
1734
1735    @asaplog_post_dec
1736    def average_beam(self, mask=None, weight='none'):
1737        """\
1738        Average the Beams together.
1739
1740        Parameters:
1741            mask:        An optional mask defining the region where the
1742                         averaging will be applied. The output will have all
1743                         specified points masked.
1744
1745            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1746                         weighted), or 'tsys' (1/Tsys**2 weighted)
1747
1748        """
1749        varlist = vars()
1750        mask = mask or ()
1751        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1752        s._add_history("average_beam", varlist)
1753        return s
1754
1755    def parallactify(self, pflag):
1756        """\
1757        Set a flag to indicate whether this data should be treated as having
1758        been 'parallactified' (total phase == 0.0)
1759
1760        Parameters:
1761
1762            pflag:  Bool indicating whether to turn this on (True) or
1763                    off (False)
1764
1765        """
1766        varlist = vars()
1767        self._parallactify(pflag)
1768        self._add_history("parallactify", varlist)
1769
1770    @asaplog_post_dec
1771    def convert_pol(self, poltype=None):
1772        """\
1773        Convert the data to a different polarisation type.
1774        Note that you will need cross-polarisation terms for most conversions.
1775
1776        Parameters:
1777
1778            poltype:    The new polarisation type. Valid types are:
1779                        "linear", "circular", "stokes" and "linpol"
1780
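        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable that contains the required cross-polarisation terms
            stokescan = scan.convert_pol('stokes')
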
1781        """
1782        varlist = vars()
1783        s = scantable(self._math._convertpol(self, poltype))
1784        s._add_history("convert_pol", varlist)
1785        return s
1786
1787    @asaplog_post_dec
1788    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1789        """\
1790        Smooth the spectrum by the specified kernel (conserving flux).
1791
1792        Parameters:
1793
1794            kernel:     The type of smoothing kernel. Select from
1795                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1796                        or 'poly'
1797
1798            width:      The width of the kernel in pixels. Ignored for
1799                        'hanning'; for the other kernels it defaults to 5 pixels.
1800                        For 'gaussian' it is the Full Width Half
1801                        Maximum. For 'boxcar' it is the full width.
1802                        For 'rmedian' and 'poly' it is the half width.
1803
1804            order:      Optional parameter for 'poly' kernel (default is 2), to
1805                        specify the order of the polynomial. Ignored by all other
1806                        kernels.
1807
1808            plot:       plot the original and the smoothed spectra.
1809                        In this mode each individual fit has to be approved
1810                        by typing 'y' or 'n'
1811
1812            insitu:     if False a new scantable is returned.
1813                        Otherwise, the scaling is done in-situ
1814                        The default is taken from .asaprc (False)
1815
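        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable and the kernel width is a placeholder
            smoothed = scan.smooth(kernel='gaussian', width=7.0, insitu=False)
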
1816        """
1817        if insitu is None: insitu = rcParams['insitu']
1818        self._math._setinsitu(insitu)
1819        varlist = vars()
1820
1821        if plot: orgscan = self.copy()
1822
1823        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1824        s._add_history("smooth", varlist)
1825
1826        if plot:
1827            if rcParams['plotter.gui']:
1828                from asap.asaplotgui import asaplotgui as asaplot
1829            else:
1830                from asap.asaplot import asaplot
1831            self._p=asaplot()
1832            self._p.set_panels()
1833            ylab=s._get_ordinate_label()
1834            #self._p.palette(0,["#777777","red"])
1835            for r in xrange(s.nrow()):
1836                xsm=s._getabcissa(r)
1837                ysm=s._getspectrum(r)
1838                xorg=orgscan._getabcissa(r)
1839                yorg=orgscan._getspectrum(r)
1840                self._p.clear()
1841                self._p.hold()
1842                self._p.set_axes('ylabel',ylab)
1843                self._p.set_axes('xlabel',s._getabcissalabel(r))
1844                self._p.set_axes('title',s._getsourcename(r))
1845                self._p.set_line(label='Original',color="#777777")
1846                self._p.plot(xorg,yorg)
1847                self._p.set_line(label='Smoothed',color="red")
1848                self._p.plot(xsm,ysm)
1849                ### Ugly part for legend
1850                for i in [0,1]:
1851                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1852                self._p.release()
1853                ### Ugly part for legend
1854                self._p.subplots[0]['lines']=[]
1855                res = raw_input("Accept smoothing ([y]/n): ")
1856                if res.upper() == 'N':
1857                    s._setspectrum(yorg, r)
1858            self._p.unmap()
1859            self._p = None
1860            del orgscan
1861
1862        if insitu: self._assign(s)
1863        else: return s
1864
1865    @asaplog_post_dec
1866    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1867        """\
1868        Return a scan which has been baselined (all rows) by a polynomial.
1869       
1870        Parameters:
1871
1872            mask:       an optional mask
1873
1874            order:      the order of the polynomial (default is 0)
1875
1876            plot:       plot the fit and the residual. In this mode each
1877                        individual fit has to be approved by typing 'y'
1878                        or 'n'
1879
1880            uselin:     use linear polynomial fit
1881
1882            insitu:     if False a new scantable is returned.
1883                        Otherwise, the scaling is done in-situ
1884                        The default is taken from .asaprc (False)
1885
1886            rows:       row numbers of spectra to be processed.
1887                        (default is None: for all rows)
1888       
1889        Example:
1890            # return a scan baselined by a third order polynomial,
1891            # not using a mask
1892            bscan = scan.old_poly_baseline(order=3)
1893
1894        """
1895        if insitu is None: insitu = rcParams['insitu']
1896        if not insitu:
1897            workscan = self.copy()
1898        else:
1899            workscan = self
1900        varlist = vars()
1901        if mask is None:
1902            mask = [True for i in xrange(self.nchan())]
1903
1904        try:
1905            f = fitter()
1906            if uselin:
1907                f.set_function(lpoly=order)
1908            else:
1909                f.set_function(poly=order)
1910
1911            if rows is None:
1912                rows = xrange(workscan.nrow())
1913            elif isinstance(rows, int):
1914                rows = [ rows ]
1915           
1916            if len(rows) > 0:
1917                self.blpars = []
1918                self.masklists = []
1919                self.actualmask = []
1920           
1921            for r in rows:
1922                f.x = workscan._getabcissa(r)
1923                f.y = workscan._getspectrum(r)
1924                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1925                f.data = None
1926                f.fit()
1927                if plot:
1928                    f.plot(residual=True)
1929                    x = raw_input("Accept fit ( [y]/n ): ")
1930                    if x.upper() == 'N':
1931                        self.blpars.append(None)
1932                        self.masklists.append(None)
1933                        self.actualmask.append(None)
1934                        continue
1935                workscan._setspectrum(f.fitter.getresidual(), r)
1936                self.blpars.append(f.get_parameters())
1937                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1938                self.actualmask.append(f.mask)
1939
1940            if plot:
1941                f._p.unmap()
1942                f._p = None
1943            workscan._add_history("poly_baseline", varlist)
1944            if insitu:
1945                self._assign(workscan)
1946            else:
1947                return workscan
1948        except RuntimeError:
1949            msg = "The fit failed, possibly because it didn't converge."
1950            raise RuntimeError(msg)
1951
1952    @asaplog_post_dec
1953    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1954        """\
1955        Return a scan which has been baselined (all rows) by a polynomial.
1956        Parameters:
1957            mask:       an optional mask
1958            order:      the order of the polynomial (default is 0)
1959            plot:       plot the fit and the residual. In this each
1960                        individual fit has to be approved by typing 'y'
1961                        or 'n'. Ignored if batch = True.
1962            batch:      if True a faster algorithm is used and logs
1963                        including the fit results are not output
1964                        (default is False)
1965            insitu:     if False a new scantable is returned.
1966                        Otherwise, the scaling is done in-situ
1967                        The default is taken from .asaprc (False)
1968            rows:       row numbers of spectra to be baselined.
1969                        (default is None: for all rows)
1970        Example:
1971            # return a scan baselined by a third order polynomial,
1972            # not using a mask
1973            bscan = scan.poly_baseline(order=3)
1974        """
1975       
1976        varlist = vars()
1977       
1978        if insitu is None: insitu = rcParams["insitu"]
1979        if insitu:
1980            workscan = self
1981        else:
1982            workscan = self.copy()
1983
1984        nchan = workscan.nchan()
1985       
1986        if mask is None:
1987            mask = [True for i in xrange(nchan)]
1988
1989        try:
1990            if rows is None:
1991                rows = xrange(workscan.nrow())
1992            elif isinstance(rows, int):
1993                rows = [ rows ]
1994           
1995            if len(rows) > 0:
1996                workscan.blpars = []
1997                workscan.masklists = []
1998                workscan.actualmask = []
1999
2000            if batch:
2001                workscan._poly_baseline_batch(mask, order)
2002            elif plot:
2003                f = fitter()
2004                f.set_function(lpoly=order)
2005                for r in rows:
2006                    f.x = workscan._getabcissa(r)
2007                    f.y = workscan._getspectrum(r)
2008                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2009                    f.data = None
2010                    f.fit()
2011                   
2012                    f.plot(residual=True)
2013                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2014                    if accept_fit.upper() == "N":
2015                        workscan.blpars.append(None)
2016                        workscan.masklists.append(None)
2017                        workscan.actualmask.append(None)
2018                        continue
2019                    workscan._setspectrum(f.fitter.getresidual(), r)
2020                    workscan.blpars.append(f.get_parameters())
2021                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2022                    workscan.actualmask.append(f.mask)
2023                   
2024                f._p.unmap()
2025                f._p = None
2026            else:
2027                for r in rows:
2028                    fitparams = workscan._poly_baseline(mask, order, r)
2029                    params = fitparams.getparameters()
2030                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2031                    errors = fitparams.geterrors()
2032                    fmask = mask_and(mask, workscan._getmask(r))
2033
2034                    workscan.blpars.append({"params":params,
2035                                            "fixed": fitparams.getfixedparameters(),
2036                                            "formatted":fmtd, "errors":errors})
2037                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2038                    workscan.actualmask.append(fmask)
2039                   
2040                    asaplog.push(fmtd)
2041           
2042            workscan._add_history("poly_baseline", varlist)
2043           
2044            if insitu:
2045                self._assign(workscan)
2046            else:
2047                return workscan
2048           
2049        except RuntimeError, e:
2050            msg = "The fit failed, possibly because it didn't converge."
2051            if rcParams["verbose"]:
2052                asaplog.push(str(e))
2053                asaplog.push(str(msg))
2054                return
2055            else:
2056                raise RuntimeError(str(e)+'\n'+msg)
2057
2058
2059    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2060                           threshold=3, chan_avg_limit=1, plot=False,
2061                           insitu=None, rows=None):
2062        """\
2063        Return a scan which has been baselined (all rows) by a polynomial.
2064        Spectral lines are detected first using linefinder and masked out
2065        to avoid them affecting the baseline solution.
2066
2067        Parameters:
2068
2069            mask:       an optional mask retrieved from scantable
2070
2071            edge:       an optional number of channels to drop at the edge
2072                        of the spectrum. If only one value is
2073                        specified, the same number will be dropped from
2074                        both sides of the spectrum. Default is to keep
2075                        all channels. Nested tuples represent individual
2076                        edge selection for different IFs (the number of
2077                        spectral channels can differ between IFs)
2078
2079            order:      the order of the polynomial (default is 0)
2080
2081            threshold:  the threshold used by line finder. It is better to
2082                        keep it large as only strong lines affect the
2083                        baseline solution.
2084
2085            chan_avg_limit:
2086                        a maximum number of consecutive spectral channels to
2087                        average during the search of weak and broad lines.
2088                        The default is no averaging (and no search for weak
2089                        lines). If such lines can affect the fitted baseline
2090                        (e.g. a high order polynomial is fitted), increase this
2091                        parameter (usually values up to 8 are reasonable). Most
2092                        users of this method should find the default value
2093                        sufficient.
2094
2095            plot:       plot the fit and the residual. In this mode each
2096                        individual fit has to be approved by typing 'y'
2097                        or 'n'
2098
2099            insitu:     if False a new scantable is returned.
2100                        Otherwise, the scaling is done in-situ
2101                        The default is taken from .asaprc (False)
2102            rows:       row numbers of spectra to be processed.
2103                        (default is None: for all rows)
2104
2105
2106        Example::
2107
2108            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2109
2110        """
2111        if insitu is None: insitu = rcParams['insitu']
2112        varlist = vars()
2113        from asap.asaplinefind import linefinder
2114        from asap import _is_sequence_or_number as _is_valid
2115
2116        # check whether edge is set up for each IF individually
2117        individualedge = False
2118        if len(edge) > 1:
2119            if isinstance(edge[0], (list, tuple)):
2120                individualedge = True
2121
2122        if not _is_valid(edge, int) and not individualedge:
2123            raise ValueError, "Parameter 'edge' has to be an integer or a " \
2124                "pair of integers specified as a tuple. Nested tuples are " \
2125                "allowed to make individual selection for different IFs."
2126
2127        curedge = (0, 0)
2128        if individualedge:
2129            for edgepar in edge:
2130                if not _is_valid(edgepar, int):
2131                    raise ValueError, "Each element of the 'edge' tuple has " \
2132                        "to be a pair of integers or an integer."
2133        else:
2134            curedge = edge
2135
2136        if not insitu:
2137            workscan = self.copy()
2138        else:
2139            workscan = self
2140
2141        # setup fitter
2142        f = fitter()
2143        f.set_function(lpoly=order)
2144
2145        # setup line finder
2146        fl = linefinder()
2147        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2148
2149        fl.set_scan(workscan)
2150
2151        if mask is None:
2152            mask = _n_bools(workscan.nchan(), True)
2153       
2154        if rows is None:
2155            rows = xrange(workscan.nrow())
2156        elif isinstance(rows, int):
2157            rows = [ rows ]
2158       
2159        # Save parameters of baseline fits & masklists as a class attribute.
2160        # NOTICE: It does not reflect changes in scantable!
2161        if len(rows) > 0:
2162            self.blpars=[]
2163            self.masklists=[]
2164            self.actualmask=[]
2165        asaplog.push("Processing:")
2166        for r in rows:
2167            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2168                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2169                 workscan.getpol(r), workscan.getcycle(r))
2170            asaplog.push(msg, False)
2171
2172            # figure out edge parameter
2173            if individualedge:
2174                if len(edge) <= workscan.getif(r):
2175                    raise RuntimeError, "Number of edge elements appears to " \
2176                                        "be less than the number of IFs"
2177                curedge = edge[workscan.getif(r)]
2178
2179            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2180
2181            # setup line finder
2182            fl.find_lines(r, actualmask, curedge)
2183           
2184            f.x = workscan._getabcissa(r)
2185            f.y = workscan._getspectrum(r)
2186            f.mask = fl.get_mask()
2187            f.data = None
2188            f.fit()
2189
2190            # Show mask list
2191            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2192            msg = "mask range: "+str(masklist)
2193            asaplog.push(msg, False)
2194
2195            if plot:
2196                f.plot(residual=True)
2197                x = raw_input("Accept fit ( [y]/n ): ")
2198                if x.upper() == 'N':
2199                    self.blpars.append(None)
2200                    self.masklists.append(None)
2201                    self.actualmask.append(None)
2202                    continue
2203
2204            workscan._setspectrum(f.fitter.getresidual(), r)
2205            self.blpars.append(f.get_parameters())
2206            self.masklists.append(masklist)
2207            self.actualmask.append(f.mask)
2208        if plot:
2209            f._p.unmap()
2210            f._p = None
2211        workscan._add_history("auto_poly_baseline", varlist)
2212        if insitu:
2213            self._assign(workscan)
2214        else:
2215            return workscan
2216
2217    @asaplog_post_dec
2218    def rotate_linpolphase(self, angle):
2219        """\
2220        Rotate the phase of the complex polarization O=Q+iU correlation.
2221        This is always done in situ in the raw data.  So if you call this
2222        function more than once then each call rotates the phase further.
2223
2224        Parameters:
2225
2226            angle:   The angle (degrees) to rotate (add) by.
2227
2228        Example::
2229
2230            scan.rotate_linpolphase(2.3)
2231
2232        """
2233        varlist = vars()
2234        self._math._rotate_linpolphase(self, angle)
2235        self._add_history("rotate_linpolphase", varlist)
2236        return
2237
2238    @asaplog_post_dec
2239    def rotate_xyphase(self, angle):
2240        """\
2241        Rotate the phase of the XY correlation.  This is always done in situ
2242        in the data.  So if you call this function more than once
2243        then each call rotates the phase further.
2244
2245        Parameters:
2246
2247            angle:   The angle (degrees) to rotate (add) by.
2248
2249        Example::
2250
2251            scan.rotate_xyphase(2.3)
2252
2253        """
2254        varlist = vars()
2255        self._math._rotate_xyphase(self, angle)
2256        self._add_history("rotate_xyphase", varlist)
2257        return
2258
2259    @asaplog_post_dec
2260    def swap_linears(self):
2261        """\
2262        Swap the linear polarisations XX and YY, or more generally the first
2263        two polarisations, as this also works for circulars.
2264        """
2265        varlist = vars()
2266        self._math._swap_linears(self)
2267        self._add_history("swap_linears", varlist)
2268        return
2269
2270    @asaplog_post_dec
2271    def invert_phase(self):
2272        """\
2273        Invert the phase of the complex polarisation
2274        """
2275        varlist = vars()
2276        self._math._invert_phase(self)
2277        self._add_history("invert_phase", varlist)
2278        return
2279
2280    @asaplog_post_dec
2281    def add(self, offset, insitu=None):
2282        """\
2283        Return a scan where all spectra have the offset added
2284
2285        Parameters:
2286
2287            offset:      the offset
2288
2289            insitu:      if False a new scantable is returned.
2290                         Otherwise, the scaling is done in-situ
2291                         The default is taken from .asaprc (False)
2292
2293        """
2294        if insitu is None: insitu = rcParams['insitu']
2295        self._math._setinsitu(insitu)
2296        varlist = vars()
2297        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2298        s._add_history("add", varlist)
2299        if insitu:
2300            self._assign(s)
2301        else:
2302            return s
2303
2304    @asaplog_post_dec
2305    def scale(self, factor, tsys=True, insitu=None):
2306        """\
2307
2308        Return a scan where all spectra are scaled by the given 'factor'
2309
2310        Parameters:
2311
2312            factor:      the scaling factor (float or 1D float list)
2313
2314            insitu:      if False a new scantable is returned.
2315                         Otherwise, the scaling is done in-situ
2316                         The default is taken from .asaprc (False)
2317
2318            tsys:        if True (default) then apply the operation to Tsys
2319                         as well as the data
2320
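        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable and the scaling factor is a placeholder
            scaled = scan.scale(1.3, tsys=True, insitu=False)
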
2321        """
2322        if insitu is None: insitu = rcParams['insitu']
2323        self._math._setinsitu(insitu)
2324        varlist = vars()
2325        s = None
2326        import numpy
2327        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2328            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2329                from asapmath import _array2dOp
2330                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2331            else:
2332                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2333        else:
2334            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2335        s._add_history("scale", varlist)
2336        if insitu:
2337            self._assign(s)
2338        else:
2339            return s
2340
2341    def set_sourcetype(self, match, matchtype="pattern",
2342                       sourcetype="reference"):
2343        """\
2344        Set the type of the source to be a source or reference scan
2345        using the provided pattern.
2346
2347        Parameters:
2348
2349            match:          a Unix style pattern, regular expression or selector
2350
2351            matchtype:      'pattern' (default) UNIX style pattern or
2352                            'regex' regular expression
2353
2354            sourcetype:     the type of the source to use (source/reference)
2355
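        Example::

            # a minimal usage sketch; mark scans whose source name ends in
            # '_R' (a common off-source naming convention) as reference scans
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
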
2356        """
2357        varlist = vars()
2358        basesel = self.get_selection()
2359        stype = -1
2360        if sourcetype.lower().startswith("r"):
2361            stype = 1
2362        elif sourcetype.lower().startswith("s"):
2363            stype = 0
2364        else:
2365            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2366        if matchtype.lower().startswith("p"):
2367            matchtype = "pattern"
2368        elif matchtype.lower().startswith("r"):
2369            matchtype = "regex"
2370        else:
2371            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2372        sel = selector()
2373        if isinstance(match, selector):
2374            sel = match
2375        else:
2376            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2377        self.set_selection(basesel+sel)
2378        self._setsourcetype(stype)
2379        self.set_selection(basesel)
2380        self._add_history("set_sourcetype", varlist)
2381
2382    @asaplog_post_dec
2383    @preserve_selection
2384    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2385        """\
2386        This function allows you to build quotients automatically.
2387        It assumes the observation has the same number of
2388        "ons" and "offs".
2389
2390        Parameters:
2391
2392            preserve:       you can preserve (default) the continuum or
2393                            remove it.  The equations used are
2394
2395                            preserve: Output = Toff * (on/off) - Toff
2396
2397                            remove:   Output = Toff * (on/off) - Ton
2398
2399            mode:           the on/off detection mode
2400                            'paired' (default)
2401                            identifies 'off' scans by the
2402                            trailing '_R' (Mopra/Parkes) or
2403                            '_e'/'_w' (Tid) and matches
2404                            on/off pairs from the observing pattern
2405                            'time'
2406                            finds the closest off in time
2407
2408        .. todo:: verify argument is not implemented
2409
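        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable containing matching on/off scans
            q = scan.auto_quotient(preserve=True, mode='paired')
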
2410        """
2411        varlist = vars()
2412        modes = ["time", "paired"]
2413        if not mode in modes:
2414            msg = "please provide valid mode. Valid modes are %s" % (modes)
2415            raise ValueError(msg)
2416        s = None
2417        if mode.lower() == "paired":
2418            sel = self.get_selection()
2419            sel.set_query("SRCTYPE==psoff")
2420            self.set_selection(sel)
2421            offs = self.copy()
2422            sel.set_query("SRCTYPE==pson")
2423            self.set_selection(sel)
2424            ons = self.copy()
2425            s = scantable(self._math._quotient(ons, offs, preserve))
2426        elif mode.lower() == "time":
2427            s = scantable(self._math._auto_quotient(self, mode, preserve))
2428        s._add_history("auto_quotient", varlist)
2429        return s
2430
2431    @asaplog_post_dec
2432    def mx_quotient(self, mask = None, weight='median', preserve=True):
2433        """\
2434        Form a quotient using "off" beams when observing in "MX" mode.
2435
2436        Parameters:
2437
2438            mask:           an optional mask to be used when weight == 'stddev'
2439
2440            weight:         How to average the off beams.  Default is 'median'.
2441
2442            preserve:       you can preserve (default) the continuum or
2443                            remove it.  The equations used are:
2444
2445                                preserve: Output = Toff * (on/off) - Toff
2446
2447                                remove:   Output = Toff * (on/off) - Ton
2448
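        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # scantable observed in MX mode
            q = scan.mx_quotient(weight='median', preserve=True)
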
2449        """
2450        mask = mask or ()
2451        varlist = vars()
2452        on = scantable(self._math._mx_extract(self, 'on'))
2453        preoff = scantable(self._math._mx_extract(self, 'off'))
2454        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2455        from asapmath  import quotient
2456        q = quotient(on, off, preserve)
2457        q._add_history("mx_quotient", varlist)
2458        return q
2459
2460    @asaplog_post_dec
2461    def freq_switch(self, insitu=None):
2462        """\
2463        Apply frequency switching to the data.
2464
2465        Parameters:
2466
2467            insitu:      if False a new scantable is returned.
2468                         Otherwise, the switching is done in-situ
2469                         The default is taken from .asaprc (False)
2470
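        Example::

            # a minimal usage sketch; 'scan' is assumed to be an existing
            # frequency-switched scantable
            fs = scan.freq_switch(insitu=False)
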
2471        """
2472        if insitu is None: insitu = rcParams['insitu']
2473        self._math._setinsitu(insitu)
2474        varlist = vars()
2475        s = scantable(self._math._freqswitch(self))
2476        s._add_history("freq_switch", varlist)
2477        if insitu:
2478            self._assign(s)
2479        else:
2480            return s
2481
2482    @asaplog_post_dec
2483    def recalc_azel(self):
2484        """Recalculate the azimuth and elevation for each position."""
2485        varlist = vars()
2486        self._recalcazel()
2487        self._add_history("recalc_azel", varlist)
2488        return
2489
2490    @asaplog_post_dec
2491    def __add__(self, other):
2492        varlist = vars()
2493        s = None
2494        if isinstance(other, scantable):
2495            s = scantable(self._math._binaryop(self, other, "ADD"))
2496        elif isinstance(other, float):
2497            s = scantable(self._math._unaryop(self, other, "ADD", False))
2498        else:
2499            raise TypeError("Other input is not a scantable or float value")
2500        s._add_history("operator +", varlist)
2501        return s
2502
2503    @asaplog_post_dec
2504    def __sub__(self, other):
2505        """
2506        implicit on all axes and on Tsys
2507        """
2508        varlist = vars()
2509        s = None
2510        if isinstance(other, scantable):
2511            s = scantable(self._math._binaryop(self, other, "SUB"))
2512        elif isinstance(other, float):
2513            s = scantable(self._math._unaryop(self, other, "SUB", False))
2514        else:
2515            raise TypeError("Other input is not a scantable or float value")
2516        s._add_history("operator -", varlist)
2517        return s
2518
2519    @asaplog_post_dec
2520    def __mul__(self, other):
2521        """
2522        implicit on all axes and on Tsys
2523        """
2524        varlist = vars()
2525        s = None
2526        if isinstance(other, scantable):
2527            s = scantable(self._math._binaryop(self, other, "MUL"))
2528        elif isinstance(other, float):
2529            s = scantable(self._math._unaryop(self, other, "MUL", False))
2530        else:
2531            raise TypeError("Other input is not a scantable or float value")
2532        s._add_history("operator *", varlist)
2533        return s
2534
2535
2536    @asaplog_post_dec
2537    def __div__(self, other):
2538        """
2539        implicit on all axes and on Tsys
2540        """
2541        varlist = vars()
2542        s = None
2543        if isinstance(other, scantable):
2544            s = scantable(self._math._binaryop(self, other, "DIV"))
2545        elif isinstance(other, float):
2546            if other == 0.0:
2547                raise ZeroDivisionError("Dividing by zero is not recommended")
2548            s = scantable(self._math._unaryop(self, other, "DIV", False))
2549        else:
2550            raise TypeError("Other input is not a scantable or float value")
2551        s._add_history("operator /", varlist)
2552        return s
2553
2554    @asaplog_post_dec
2555    def get_fit(self, row=0):
2556        """\
2557        Print or return the stored fits for a row in the scantable
2558
2559        Parameters:
2560
2561            row:    the row which the fit has been applied to.
2562
2563        """
2564        if row >= self.nrow():
2565            return
2566        from asap.asapfit import asapfit
2567        fit = asapfit(self._getfit(row))
2568        asaplog.push( '%s' %(fit) )
2569        return fit.as_dict()
2570
2571    def flag_nans(self):
2572        """\
2573        Utility function to flag NaN values in the scantable.
2574        """
2575        import numpy
2576        basesel = self.get_selection()
2577        for i in range(self.nrow()):
2578            sel = self.get_row_selector(i)
2579            self.set_selection(basesel+sel)
2580            nans = numpy.isnan(self._getspectrum(0))
2581            if numpy.any(nans):
2582                bnans = [ bool(v) for v in nans]
2583                self.flag(bnans)
2584        self.set_selection(basesel)
2585
2586    def get_row_selector(self, rowno):
2587        return selector(beams=self.getbeam(rowno),
2588                        ifs=self.getif(rowno),
2589                        pols=self.getpol(rowno),
2590                        scans=self.getscan(rowno),
2591                        cycles=self.getcycle(rowno))
2592
2593    def _add_history(self, funcname, parameters):
2594        if not rcParams['scantable.history']:
2595            return
2596        # create date
2597        sep = "##"
2598        from datetime import datetime
2599        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2600        hist = dstr+sep
2601        hist += funcname+sep#cdate+sep
2602        if parameters.has_key('self'): del parameters['self']
2603        for k, v in parameters.iteritems():
2604            if type(v) is dict:
2605                for k2, v2 in v.iteritems():
2606                    hist += k2
2607                    hist += "="
2608                    if isinstance(v2, scantable):
2609                        hist += 'scantable'
2610                    elif k2 == 'mask':
2611                        if isinstance(v2, list) or isinstance(v2, tuple):
2612                            hist += str(self._zip_mask(v2))
2613                        else:
2614                            hist += str(v2)
2615                    else:
2616                        hist += str(v2)
2617            else:
2618                hist += k
2619                hist += "="
2620                if isinstance(v, scantable):
2621                    hist += 'scantable'
2622                elif k == 'mask':
2623                    if isinstance(v, list) or isinstance(v, tuple):
2624                        hist += str(self._zip_mask(v))
2625                    else:
2626                        hist += str(v)
2627                else:
2628                    hist += str(v)
2629            hist += sep
2630        hist = hist[:-2] # remove trailing '##'
2631        self._addhistory(hist)
2632
2633
2634    def _zip_mask(self, mask):
2635        mask = list(mask)
2636        i = 0
2637        segments = []
2638        while mask[i:].count(1):
2639            i += mask[i:].index(1)
2640            if mask[i:].count(0):
2641                j = i + mask[i:].index(0)
2642            else:
2643                j = len(mask)
2644            segments.append([i, j])
2645            i = j
2646        return segments
2647
2648    def _get_ordinate_label(self):
2649        fu = "("+self.get_fluxunit()+")"
2650        import re
2651        lbl = "Intensity"
2652        if re.match(".K.", fu):
2653            lbl = "Brightness Temperature "+ fu
2654        elif re.match(".Jy.", fu):
2655            lbl = "Flux density "+ fu
2656        return lbl
2657
2658    def _check_ifs(self):
2659        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2660        nchans = filter(lambda t: t > 0, nchans)
2661        return (min(nchans) == max(nchans))
2662
2663    @asaplog_post_dec
2664    #def _fill(self, names, unit, average, getpt, antenna):
2665    def _fill(self, names, unit, average, opts={}):
2666        first = True
2667        fullnames = []
2668        for name in names:
2669            name = os.path.expandvars(name)
2670            name = os.path.expanduser(name)
2671            if not os.path.exists(name):
2672                msg = "File '%s' does not exist" % (name)
2673                raise IOError(msg)
2674            fullnames.append(name)
2675        if average:
2676            asaplog.push('Auto averaging integrations')
2677        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2678        for name in fullnames:
2679            tbl = Scantable(stype)
2680            r = filler(tbl)
2681            rx = rcParams['scantable.reference']
2682            r.setreferenceexpr(rx)
2683            msg = "Importing %s..." % (name)
2684            asaplog.push(msg, False)
2685            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2686            r.open(name, opts)# antenna, -1, -1, getpt)
2687            r.fill()
2688            if average:
2689                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2690            if not first:
2691                tbl = self._math._merge([self, tbl])
2692            Scantable.__init__(self, tbl)
2693            r.close()
2694            del r, tbl
2695            first = False
2696            #flush log
2697        asaplog.post()
2698        if unit is not None:
2699            self.set_fluxunit(unit)
2700        if not is_casapy():
2701            self.set_freqframe(rcParams['scantable.freqframe'])
2702
2703    def __getitem__(self, key):
2704        if key < 0:
2705            key += self.nrow()
2706        if key >= self.nrow():
2707            raise IndexError("Row index out of range.")
2708        return self._getspectrum(key)
2709
2710    def __setitem__(self, key, value):
2711        if key < 0:
2712            key += self.nrow()
2713        if key >= self.nrow():
2714            raise IndexError("Row index out of range.")
2715        if not hasattr(value, "__len__") or \
2716                len(value) > self.nchan(self.getif(key)):
2717            raise ValueError("Spectrum length doesn't match.")
2718        return self._setspectrum(value, key)
2719
2720    def __len__(self):
2721        return self.nrow()
2722
2723    def __iter__(self):
2724        for i in range(len(self)):
2725            yield self[i]