source: trunk/python/scantable.py @ 1919

Last change on this file since 1919 was 1919, checked in by Takeshi Nakazato, 14 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: ori_sio_task_regression

Put in Release Notes: No

Module(s): Module Names change impacts.

Description: Describe your changes here...

Changed variables holding memory addresses in C++ from int to long.
I don't believe that this change fully fixes the problem.
However, it may improve the situation, since 'long' can
represent larger integer values than 'int'.


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 92.8 KB
Line 
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
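# A minimal sketch of how preserve_selection can be used: it restores the
# scantable's selection after the wrapped function returns, even on error.
# 'noise_per_scan' is a hypothetical helper, not part of this module.
#
#     @preserve_selection
#     def noise_per_scan(scan, scanno):
#         scan.set_selection(selector(scans=[scanno]))   # temporary selection
#         return scan.stats('stddev')                    # selection restored on return
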
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
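# A minimal sketch of dispatching on the on-disk format with the two helpers
# above; 'somefile' is a placeholder path.
#
#     if is_scantable(somefile):
#         print "ASAP scantable"
#     elif is_ms(somefile):
#         print "MeasurementSet"
#     else:
#         print "other input (e.g. rpfits/sdfits)"
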
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which reduces the time needed
110                          to load the MS data in some cases.
111
112            antenna:      Antenna selection. integer (id) or string (name or id).
113
114            parallactify: Indicate that the data had been parallactified. Default
115                          is taken from the rc file.
116
117        """
118        if average is None:
119            average = rcParams['scantable.autoaverage']
120        #if getpt is None:
121        #    getpt = True
122        #if antenna is not None:
123        #    asaplog.push("Antenna selection currently unsupported."
124        #                 "Using ''")
125        #    asaplog.post('WARN')
126        #if antenna is None:
127        #    antenna = ''
128        #elif type(antenna) == int:
129        #    antenna = '%s' % antenna
130        #elif type(antenna) == list:
131        #    tmpstr = ''
132        #    for i in range( len(antenna) ):
133        #        if type(antenna[i]) == int:
134        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
135        #        elif type(antenna[i]) == str:
136        #            tmpstr=tmpstr+antenna[i]+','
137        #        else:
138        #            raise TypeError('Bad antenna selection.')
139        #    antenna = tmpstr.rstrip(',')
140        parallactify = parallactify or rcParams['scantable.parallactify']
141        varlist = vars()
142        from asap._asap import stmath
143        self._math = stmath( rcParams['insitu'] )
144        if isinstance(filename, Scantable):
145            Scantable.__init__(self, filename)
146        else:
147            if isinstance(filename, str):
148                filename = os.path.expandvars(filename)
149                filename = os.path.expanduser(filename)
150                if not os.path.exists(filename):
151                    s = "File '%s' not found." % (filename)
152                    raise IOError(s)
153                if is_scantable(filename):
154                    ondisk = rcParams['scantable.storage'] == 'disk'
155                    Scantable.__init__(self, filename, ondisk)
156                    if unit is not None:
157                        self.set_fluxunit(unit)
158                    # do not reset to the default freqframe
159                    #self.set_freqframe(rcParams['scantable.freqframe'])
160                #elif os.path.isdir(filename) \
161                #         and not os.path.exists(filename+'/table.f1'):
162                elif is_ms(filename):
163                    # Measurement Set
164                    opts={'ms': {}}
165                    mskeys=['getpt','antenna']
166                    for key in mskeys:
167                        if key in args.keys():
168                            opts['ms'][key] = args[key]
169                    #self._fill([filename], unit, average, getpt, antenna)
170                    self._fill([filename], unit, average, opts)
171                elif os.path.isfile(filename):
172                    #self._fill([filename], unit, average, getpt, antenna)
173                    self._fill([filename], unit, average)
174                else:
175                    msg = "The given file '%s' is not a valid " \
176                          "asap table." % (filename)
177                    raise IOError(msg)
178            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
179                  and isinstance(filename[-1], str):
180                #self._fill(filename, unit, average, getpt, antenna)
181                self._fill(filename, unit, average)
182        self.parallactify(parallactify)
183        self._add_history("scantable", varlist)
184
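    # A minimal usage sketch for the constructor; the file names are
    # placeholders. For MeasurementSet input, extra keyword arguments such as
    # 'getpt' and 'antenna' are passed through to the filler.
    #
    #     s1 = scantable('obs.rpf')                          # fill from rpfits
    #     s2 = scantable('obs.asap', average=False)          # open an ASAP table
    #     s3 = scantable('obs.ms', getpt=False, antenna=0)   # fill from an MS
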
185    @asaplog_post_dec
186    def save(self, name=None, format=None, overwrite=False):
187        """\
188        Store the scantable on disk. This can be an asap (aips++) Table,
189        SDFITS or MS2 format.
190
191        Parameters:
192
193            name:        the name of the outputfile. For format "ASCII"
194                         this is the root file name (data in 'name'.txt
195                         and header in 'name'_header.txt)
196
197            format:      an optional file format. Default is ASAP.
198                         Allowed are:
199
200                            * 'ASAP' (save as ASAP [aips++] Table),
201                            * 'SDFITS' (save as SDFITS file)
202                            * 'ASCII' (saves as ascii text file)
203                            * 'MS2' (saves as an casacore MeasurementSet V2)
204                            * 'FITS' (save as image FITS - not readable by class)
205                            * 'CLASS' (save as FITS readable by CLASS)
206
207            overwrite:   If the file should be overwritten if it exists.
208                         The default False is to return with warning
209                         without writing the output. USE WITH CARE.
210
211        Example::
212
213            scan.save('myscan.asap')
214            scan.save('myscan.sdfits', 'SDFITS')
215
216        """
217        from os import path
218        format = format or rcParams['scantable.save']
219        suffix = '.'+format.lower()
220        if name is None or name == "":
221            name = 'scantable'+suffix
222            msg = "No filename given. Using default name %s..." % name
223            asaplog.push(msg)
224        name = path.expandvars(name)
225        if path.isfile(name) or path.isdir(name):
226            if not overwrite:
227                msg = "File %s exists." % name
228                raise IOError(msg)
229        format2 = format.upper()
230        if format2 == 'ASAP':
231            self._save(name)
232        else:
233            from asap._asap import stwriter as stw
234            writer = stw(format2)
235            writer.write(self, name)
236        return
237
238    def copy(self):
239        """Return a copy of this scantable.
240
241        *Note*:
242
243            This makes a full (deep) copy. scan2 = scan1 makes a reference.
244
245        Example::
246
247            copiedscan = scan.copy()
248
249        """
250        sd = scantable(Scantable._copy(self))
251        return sd
252
253    def drop_scan(self, scanid=None):
254        """\
255        Return a new scantable where the specified scan number(s) have
256        been dropped.
257
258        Parameters:
259
260            scanid:    a (list of) scan number(s)
261
262        """
263        from asap import _is_sequence_or_number as _is_valid
264        from asap import _to_list
265        from asap import unique
266        if not _is_valid(scanid):
267            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
268        scanid = _to_list(scanid)
269        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
270        for sid in scanid: allscans.remove(sid)
271        if len(allscans) == 0:
272            raise ValueError("Can't remove all scans")
273        sel = selector(scans=allscans)
274        return self._select_copy(sel)
275
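    # A minimal example for drop_scan; assumes 'scan' is an existing scantable
    # that contains scan numbers 2 and 3.
    #
    #     trimmed = scan.drop_scan([2, 3])   # keep everything except scans 2 and 3
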
276    def _select_copy(self, selection):
277        orig = self.get_selection()
278        self.set_selection(orig+selection)
279        cp = self.copy()
280        self.set_selection(orig)
281        return cp
282
283    def get_scan(self, scanid=None):
284        """\
285        Return a specific scan (by scanno) or collection of scans (by
286        source name) in a new scantable.
287
288        *Note*:
289
290            See scantable.drop_scan() for the inverse operation.
291
292        Parameters:
293
294            scanid:    a (list of) scanno or a source name, unix-style
295                       patterns are accepted for source name matching, e.g.
296                       '*_R' gets all 'ref' scans
297
298        Example::
299
300            # get all scans containing the source '323p459'
301            newscan = scan.get_scan('323p459')
302            # get all 'off' scans
303            refscans = scan.get_scan('*_R')
304            # get a subset of scans by scanno (as listed in scan.summary())
305            newscan = scan.get_scan([0, 2, 7, 10])
306
307        """
308        if scanid is None:
309            raise RuntimeError( 'Please specify a scan no or name to '
310                                'retrieve from the scantable' )
311        try:
312            bsel = self.get_selection()
313            sel = selector()
314            if type(scanid) is str:
315                sel.set_name(scanid)
316                return self._select_copy(sel)
317            elif type(scanid) is int:
318                sel.set_scans([scanid])
319                return self._select_copy(sel)
320            elif type(scanid) is list:
321                sel.set_scans(scanid)
322                return self._select_copy(sel)
323            else:
324                msg = "Illegal scanid type, use 'int' or 'list' of ints."
325                raise TypeError(msg)
326        except RuntimeError:
327            raise
328
329    def __str__(self):
330        return Scantable._summary(self, True)
331
332    def summary(self, filename=None):
333        """\
334        Print a summary of the contents of this scantable.
335
336        Parameters:
337
338            filename:    the name of a file to write the output to
339                         Default - no file output
340
341        """
342        info = Scantable._summary(self, True)
343        if filename is not None:
344            if filename == "":
345                filename = 'scantable_summary.txt'
346            from os.path import expandvars, isdir
347            filename = expandvars(filename)
348            if not isdir(filename):
349                data = open(filename, 'w')
350                data.write(info)
351                data.close()
352            else:
353                msg = "Illegal file name '%s'." % (filename)
354                raise IOError(msg)
355        return page(info)
356
357    def get_spectrum(self, rowno):
358        """Return the spectrum for the current row in the scantable as a list.
359
360        Parameters:
361
362             rowno:   the row number to retrieve the spectrum from
363
364        """
365        return self._getspectrum(rowno)
366
367    def get_mask(self, rowno):
368        """Return the mask for the current row in the scantable as a list.
369
370        Parameters:
371
372             rowno:   the row number to retrieve the mask from
373
374        """
375        return self._getmask(rowno)
376
377    def set_spectrum(self, spec, rowno):
378        """Set the spectrum for the given row in the scantable.
379
380        Parameters:
381
382             spec:   the new spectrum
383
384             rowno:  the row number to set the spectrum for
385
386        """
387        assert(len(spec) == self.nchan())
388        return self._setspectrum(spec, rowno)
389
390    def get_coordinate(self, rowno):
391        """Return the (spectral) coordinate for a given 'rowno'.
392
393        *Note*:
394
395            * This coordinate is only valid until a scantable method modifies
396              the frequency axis.
397            * This coordinate contains the original frequency set-up,
398              NOT the new frame. The conversions, however, are done using the
399              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
400              use scantable.freq_align first. Without it there is no closure,
401              i.e.::
402
403                  c = myscan.get_coordinate(0)
404                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
405
406        Parameters:
407
408             rowno:    the row number for the spectral coordinate
409
410        """
411        return coordinate(Scantable.get_coordinate(self, rowno))
412
413    def get_selection(self):
414        """\
415        Get the selection object currently set on this scantable.
416
417        Example::
418
419            sel = scan.get_selection()
420            sel.set_ifs(0)              # select IF 0
421            scan.set_selection(sel)     # apply modified selection
422
423        """
424        return selector(self._getselection())
425
426    def set_selection(self, selection=None, **kw):
427        """\
428        Select a subset of the data. All following operations on this scantable
429        are only applied to this selection.
430
431        Parameters:
432
433            selection:    a selector object (default unset the selection), or
434                          any combination of "pols", "ifs", "beams", "scans",
435                          "cycles", "name", "query"
436
437        Examples::
438
439            sel = selector()         # create a selection object
440            sel.set_scans([0, 3])     # select SCANNO 0 and 3
441            scan.set_selection(sel)  # set the selection
442            scan.summary()           # will only print summary of scanno 0 and 3
443            scan.set_selection()     # unset the selection
444            # or the equivalent
445            scan.set_selection(scans=[0,3])
446            scan.summary()           # will only print summary of scanno 0 and 3
447            scan.set_selection()     # unset the selection
448
449        """
450        if selection is None:
451            # reset
452            if len(kw) == 0:
453                selection = selector()
454            else:
455                # try keywords
456                for k in kw:
457                    if k not in selector.fields:
458                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
459                selection = selector(**kw)
460        self._setselection(selection)
461
462    def get_row(self, row=0, insitu=None):
463        """\
464        Select a row in the scantable.
465        Return a scantable with a single row.
466
467        Parameters:
468
469            row:    row no of integration, default is 0.
470            insitu: if False a new scantable is returned. Otherwise, the
471                    scaling is done in-situ. The default is taken from .asaprc
472                    (False)
473
474        """
475        if insitu is None: insitu = rcParams['insitu']
476        if not insitu:
477            workscan = self.copy()
478        else:
479            workscan = self
480        # Select a row
481        sel=selector()
482        sel.set_scans([workscan.getscan(row)])
483        sel.set_cycles([workscan.getcycle(row)])
484        sel.set_beams([workscan.getbeam(row)])
485        sel.set_ifs([workscan.getif(row)])
486        sel.set_polarisations([workscan.getpol(row)])
487        sel.set_name(workscan._getsourcename(row))
488        workscan.set_selection(sel)
489        if not workscan.nrow() == 1:
490            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
491            raise RuntimeError(msg)
492        del sel
493        if insitu:
494            self._assign(workscan)
495        else:
496            return workscan
497
498    @asaplog_post_dec
499    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
500        """\
501        Determine the specified statistic of the current beam/if/pol.
502        Takes a 'mask' as an optional parameter to specify which
503        channels should be excluded.
504
505        Parameters:
506
507            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
508                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
509
510            mask:    an optional mask specifying where the statistic
511                     should be determined.
512
513            form:    format string to print statistic values
514
515            row:     row number of spectrum to process.
516                     (default is None: for all rows)
517
518        Example:
519            scan.set_unit('channel')
520            msk = scan.create_mask([100, 200], [500, 600])
521            scan.stats(stat='mean', mask=msk)
522
523        """
524        mask = mask or []
525        if not self._check_ifs():
526            raise ValueError("Cannot apply mask as the IFs have different "
527                             "number of channels. Please use set_selection() "
528                             "to select individual IFs")
529        rtnabc = False
530        if stat.lower().endswith('_abc'): rtnabc = True
531        getchan = False
532        if stat.lower().startswith('min') or stat.lower().startswith('max'):
533            chan = self._math._minmaxchan(self, mask, stat)
534            getchan = True
535            statvals = []
536        if not rtnabc:
537            if row == None:
538                statvals = self._math._stats(self, mask, stat)
539            else:
540                statvals = self._math._statsrow(self, mask, stat, int(row))
541
542        #def cb(i):
543        #    return statvals[i]
544
545        #return self._row_callback(cb, stat)
546
547        label=stat
548        #callback=cb
549        out = ""
550        #outvec = []
551        sep = '-'*50
552
553        if row == None:
554            rows = xrange(self.nrow())
555        elif isinstance(row, int):
556            rows = [ row ]
557
558        for i in rows:
559            refstr = ''
560            statunit= ''
561            if getchan:
562                qx, qy = self.chan2data(rowno=i, chan=chan[i])
563                if rtnabc:
564                    statvals.append(qx['value'])
565                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
566                    statunit= '['+qx['unit']+']'
567                else:
568                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
569
570            tm = self._gettime(i)
571            src = self._getsourcename(i)
572            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
573            out += 'Time[%s]:\n' % (tm)
574            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
575            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
576            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
577            #outvec.append(callback(i))
578            if len(rows) > 1:
579                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
580                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
581            else:
582                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
583                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
584            out +=  sep+"\n"
585
586        import os
587        if os.environ.has_key( 'USER' ):
588            usr = os.environ['USER']
589        else:
590            import commands
591            usr = commands.getoutput( 'whoami' )
592        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
593        f = open(tmpfile,'w')
594        print >> f, sep
595        print >> f, ' %s %s' % (label, statunit)
596        print >> f, sep
597        print >> f, out
598        f.close()
599        f = open(tmpfile,'r')
600        x = f.readlines()
601        f.close()
602        asaplog.push(''.join(x), False)
603
604        return statvals
605
606    def chan2data(self, rowno=0, chan=0):
607        """\
608        Returns channel/frequency/velocity and spectral value
609        at an arbitrary row and channel in the scantable.
610
611        Parameters:
612
613            rowno:   a row number in the scantable. Default is the
614                     first row, i.e. rowno=0
615
616            chan:    a channel in the scantable. Default is the first
617                     channel, i.e. pos=0
618
619        """
620        if isinstance(rowno, int) and isinstance(chan, int):
621            qx = {'unit': self.get_unit(),
622                  'value': self._getabcissa(rowno)[chan]}
623            qy = {'unit': self.get_fluxunit(),
624                  'value': self._getspectrum(rowno)[chan]}
625            return qx, qy
626
627    def stddev(self, mask=None):
628        """\
629        Determine the standard deviation of the current beam/if/pol.
630        Takes a 'mask' as an optional parameter to specify which
631        channels should be excluded.
632
633        Parameters:
634
635            mask:    an optional mask specifying where the standard
636                     deviation should be determined.
637
638        Example::
639
640            scan.set_unit('channel')
641            msk = scan.create_mask([100, 200], [500, 600])
642            scan.stddev(mask=msk)
643
644        """
645        return self.stats(stat='stddev', mask=mask);
646
647
648    def get_column_names(self):
649        """\
650        Return a list of column names, which can be used for selection.
651        """
652        return list(Scantable.get_column_names(self))
653
654    def get_tsys(self, row=-1):
655        """\
656        Return the System temperatures.
657
658        Parameters:
659
660            row:    the rowno to get the information for. (default all rows)
661
662        Returns:
663
664            a list of Tsys values for the current selection
665
666        """
667        if row > -1:
668            return self._get_column(self._gettsys, row)
669        return self._row_callback(self._gettsys, "Tsys")
670
671
672    def get_weather(self, row=-1):
673        """\
674        Return the weather information.
675
676        Parameters:
677
678            row:    the rowno to get the information for. (default all rows)
679
680        Returns:
681
682            a dict or a list of dicts of values for the current selection
683
684        """
685
686        values = self._get_column(self._get_weather, row)
687        if row > -1:
688            return {'temperature': values[0],
689                    'pressure': values[1], 'humidity' : values[2],
690                    'windspeed' : values[3], 'windaz' : values[4]
691                    }
692        else:
693            out = []
694            for r in values:
695
696                out.append({'temperature': r[0],
697                            'pressure': r[1], 'humidity' : r[2],
698                            'windspeed' : r[3], 'windaz' : r[4]
699                    })
700            return out
701
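    # A minimal example for get_weather; assumes 'scan' is an existing
    # scantable with a filled weather table.
    #
    #     w = scan.get_weather(row=0)
    #     print w['temperature'], w['pressure'], w['humidity']
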
702    def _row_callback(self, callback, label):
703        out = ""
704        outvec = []
705        sep = '-'*50
706        for i in range(self.nrow()):
707            tm = self._gettime(i)
708            src = self._getsourcename(i)
709            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
710            out += 'Time[%s]:\n' % (tm)
711            if self.nbeam(-1) > 1:
712                out +=  ' Beam[%d] ' % (self.getbeam(i))
713            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
714            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
715            outvec.append(callback(i))
716            out += '= %3.3f\n' % (outvec[i])
717            out +=  sep+'\n'
718
719        asaplog.push(sep)
720        asaplog.push(" %s" % (label))
721        asaplog.push(sep)
722        asaplog.push(out)
723        asaplog.post()
724        return outvec
725
726    def _get_column(self, callback, row=-1):
727        """
728        """
729        if row == -1:
730            return [callback(i) for i in range(self.nrow())]
731        else:
732            if  0 <= row < self.nrow():
733                return callback(row)
734
735
736    def get_time(self, row=-1, asdatetime=False):
737        """\
738        Get a list of time stamps for the observations.
739        Return a string (or datetime object) for each integration time stamp.
740
741        Parameters:
742
743            row:          row no of integration. Default -1 return all rows
744
745            asdatetime:   return values as datetime objects rather than strings
746
747        """
748        from time import strptime
749        from datetime import datetime
750        times = self._get_column(self._gettime, row)
751        if not asdatetime:
752            return times
753        format = "%Y/%m/%d/%H:%M:%S"
754        if isinstance(times, list):
755            return [datetime(*strptime(i, format)[:6]) for i in times]
756        else:
757            return datetime(*strptime(times, format)[:6])
758
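    # A minimal example for get_time; assumes 'scan' is an existing scantable.
    #
    #     stamps = scan.get_time(asdatetime=True)   # list of datetime objects
    #     print stamps[-1] - stamps[0]               # total time span
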
759
760    def get_inttime(self, row=-1):
761        """\
762        Get a list of integration times for the observations.
763        Return a time in seconds for each integration in the scantable.
764
765        Parameters:
766
767            row:    row no of integration. Default -1 return all rows.
768
769        """
770        return self._get_column(self._getinttime, row)
771
772
773    def get_sourcename(self, row=-1):
774        """\
775        Get a list of source names for the observations.
776        Return a string for each integration in the scantable.
777        Parameters:
778
779            row:    row no of integration. Default -1 return all rows.
780
781        """
782        return self._get_column(self._getsourcename, row)
783
784    def get_elevation(self, row=-1):
785        """\
786        Get a list of elevations for the observations.
787        Return a float for each integration in the scantable.
788
789        Parameters:
790
791            row:    row no of integration. Default -1 return all rows.
792
793        """
794        return self._get_column(self._getelevation, row)
795
796    def get_azimuth(self, row=-1):
797        """\
798        Get a list of azimuths for the observations.
799        Return a float for each integration in the scantable.
800
801        Parameters:
802            row:    row no of integration. Default -1 return all rows.
803
804        """
805        return self._get_column(self._getazimuth, row)
806
807    def get_parangle(self, row=-1):
808        """\
809        Get a list of parallactic angles for the observations.
810        Return a float for each integration in the scantable.
811
812        Parameters:
813
814            row:    row no of integration. Default -1 return all rows.
815
816        """
817        return self._get_column(self._getparangle, row)
818
819    def get_direction(self, row=-1):
820        """
821        Get a list of Positions on the sky (direction) for the observations.
822        Return a string for each integration in the scantable.
823
824        Parameters:
825
826            row:    row no of integration. Default -1 return all rows
827
828        """
829        return self._get_column(self._getdirection, row)
830
831    def get_directionval(self, row=-1):
832        """\
833        Get a list of Positions on the sky (direction) for the observations.
834        Return a float for each integration in the scantable.
835
836        Parameters:
837
838            row:    row no of integration. Default -1 return all rows
839
840        """
841        return self._get_column(self._getdirectionvec, row)
842
843    @asaplog_post_dec
844    def set_unit(self, unit='channel'):
845        """\
846        Set the unit for all following operations on this scantable
847
848        Parameters:
849
850            unit:    optional unit, default is 'channel'. Use one of '*Hz',
851                     'km/s', 'channel' or equivalent ''
852
853        """
854        varlist = vars()
855        if unit in ['', 'pixel', 'channel']:
856            unit = ''
857        inf = list(self._getcoordinfo())
858        inf[0] = unit
859        self._setcoordinfo(inf)
860        self._add_history("set_unit", varlist)
861
862    @asaplog_post_dec
863    def set_instrument(self, instr):
864        """\
865        Set the instrument for subsequent processing.
866
867        Parameters:
868
869            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
870                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
871
872        """
873        self._setInstrument(instr)
874        self._add_history("set_instrument", vars())
875
876    @asaplog_post_dec
877    def set_feedtype(self, feedtype):
878        """\
879        Overwrite the feed type, which might not be set correctly.
880
881        Parameters:
882
883            feedtype:     'linear' or 'circular'
884
885        """
886        self._setfeedtype(feedtype)
887        self._add_history("set_feedtype", vars())
888
889    @asaplog_post_dec
890    def set_doppler(self, doppler='RADIO'):
891        """\
892        Set the doppler for all following operations on this scantable.
893
894        Parameters:
895
896            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
897
898        """
899        varlist = vars()
900        inf = list(self._getcoordinfo())
901        inf[2] = doppler
902        self._setcoordinfo(inf)
903        self._add_history("set_doppler", vars())
904
905    @asaplog_post_dec
906    def set_freqframe(self, frame=None):
907        """\
908        Set the frame type of the Spectral Axis.
909
910        Parameters:
911
912            frame:   an optional frame type, default 'LSRK'. Valid frames are:
913                     'TOPO', 'LSRD', 'LSRK', 'BARY',
914                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
915
916        Example::
917
918            scan.set_freqframe('BARY')
919
920        """
921        frame = frame or rcParams['scantable.freqframe']
922        varlist = vars()
923        # "REST" is not implemented in casacore
924        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
925        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
926        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
927                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
928
929        if frame in valid:
930            inf = list(self._getcoordinfo())
931            inf[1] = frame
932            self._setcoordinfo(inf)
933            self._add_history("set_freqframe", varlist)
934        else:
935            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
936            raise TypeError(msg)
937
938    @asaplog_post_dec
939    def set_dirframe(self, frame=""):
940        """\
941        Set the frame type of the Direction on the sky.
942
943        Parameters:
944
945            frame:   an optional frame type, default ''. Valid frames are:
946                     'J2000', 'B1950', 'GALACTIC'
947
948        Example::
949
950            scan.set_dirframe('GALACTIC')
951
952        """
953        varlist = vars()
954        Scantable.set_dirframe(self, frame)
955        self._add_history("set_dirframe", varlist)
956
957    def get_unit(self):
958        """\
959        Get the default unit set in this scantable
960
961        Returns:
962
963            A unit string
964
965        """
966        inf = self._getcoordinfo()
967        unit = inf[0]
968        if unit == '': unit = 'channel'
969        return unit
970
971    @asaplog_post_dec
972    def get_abcissa(self, rowno=0):
973        """\
974        Get the abcissa in the current coordinate setup for the currently
975        selected Beam/IF/Pol
976
977        Parameters:
978
979            rowno:    an optional row number in the scantable. Default is the
980                      first row, i.e. rowno=0
981
982        Returns:
983
984            The abcissa values and the format string (as a dictionary)
985
986        """
987        abc = self._getabcissa(rowno)
988        lbl = self._getabcissalabel(rowno)
989        return abc, lbl
990
991    @asaplog_post_dec
992    def flag(self, mask=None, unflag=False):
993        """\
994        Flag the selected data using an optional channel mask.
995
996        Parameters:
997
998            mask:   an optional channel mask, created with create_mask. Default
999                    (no mask) is all channels.
1000
1001            unflag:    if True, unflag the data
1002
1003        """
1004        varlist = vars()
1005        mask = mask or []
1006        self._flag(mask, unflag)
1007        self._add_history("flag", varlist)
1008
1009    @asaplog_post_dec
1010    def flag_row(self, rows=[], unflag=False):
1011        """\
1012        Flag the selected data in row-based manner.
1013
1014        Parameters:
1015
1016            rows:   list of row numbers to be flagged. Default is no row
1017                    (must be explicitly specified to execute row-based flagging).
1018
1019            unflag: if True, unflag the data.
1020
1021        """
1022        varlist = vars()
1023        self._flag_row(rows, unflag)
1024        self._add_history("flag_row", varlist)
1025
1026    @asaplog_post_dec
1027    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1028        """\
1029        Flag the selected data outside a specified range (channel-based).
1030
1031        Parameters:
1032
1033            uthres:      upper threshold.
1034
1035            dthres:      lower threshold
1036
1037            clipoutside: True for flagging data outside the range [dthres:uthres].
1038                         False for flagging data inside the range.
1039
1040            unflag:      if True, unflag the data.
1041
1042        """
1043        varlist = vars()
1044        self._clip(uthres, dthres, clipoutside, unflag)
1045        self._add_history("clip", varlist)
1046
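    # A minimal sketch combining the flagging methods above; assumes 'scan' is
    # an existing scantable and the channel/row numbers are placeholders.
    #
    #     msk = scan.create_mask([0, 100])      # channels 0-100
    #     scan.flag(mask=msk)                   # flag those channels
    #     scan.flag_row(rows=[2, 5])            # flag whole rows 2 and 5
    #     scan.clip(uthres=5.0, dthres=-5.0)    # flag data outside [-5, 5]
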
1047    @asaplog_post_dec
1048    def lag_flag(self, start, end, unit="MHz", insitu=None):
1049        """\
1050        Flag the data in 'lag' space by providing a frequency to remove.
1051        Flagged data in the scantable gets interpolated over the region.
1052        No taper is applied.
1053
1054        Parameters:
1055
1056            start:    the start frequency (really a period within the
1057                      bandwidth)  or period to remove
1058
1059            end:      the end frequency or period to remove
1060
1061            unit:     the frequency unit (default "MHz") or "" for
1062                      explicit lag channels
1063
1064        *Notes*:
1065
1066            It is recommended to flag edges of the band or strong
1067            signals beforehand.
1068
1069        """
1070        if insitu is None: insitu = rcParams['insitu']
1071        self._math._setinsitu(insitu)
1072        varlist = vars()
1073        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1074        if not (unit == "" or base.has_key(unit)):
1075            raise ValueError("%s is not a valid unit." % unit)
1076        if unit == "":
1077            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1078        else:
1079            s = scantable(self._math._lag_flag(self, start*base[unit],
1080                                               end*base[unit], "frequency"))
1081        s._add_history("lag_flag", varlist)
1082        if insitu:
1083            self._assign(s)
1084        else:
1085            return s
1086
1087    @asaplog_post_dec
1088    def create_mask(self, *args, **kwargs):
1089        """\
1090        Compute and return a mask based on [min, max] windows.
1091        The specified windows are to be INCLUDED, when the mask is
1092        applied.
1093
1094        Parameters:
1095
1096            [min, max], [min2, max2], ...
1097                Pairs of start/end points (inclusive) specifying the regions
1098                to be masked
1099
1100            invert:     optional argument. If specified as True,
1101                        return an inverted mask, i.e. the regions
1102                        specified are EXCLUDED
1103
1104            row:        create the mask using the specified row for
1105                        unit conversions, default is row=0
1106                        only necessary if frequency varies over rows.
1107
1108        Examples::
1109
1110            scan.set_unit('channel')
1111            # a)
1112            msk = scan.create_mask([400, 500], [800, 900])
1113            # masks everything outside 400 and 500
1114            # and 800 and 900 in the unit 'channel'
1115
1116            # b)
1117            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1118            # masks the regions between 400 and 500
1119            # and 800 and 900 in the unit 'channel'
1120
1121            # c)
1122            #mask only channel 400
1123            msk =  scan.create_mask([400])
1124
1125        """
1126        row = kwargs.get("row", 0)
1127        data = self._getabcissa(row)
1128        u = self._getcoordinfo()[0]
1129        if u == "":
1130            u = "channel"
1131        msg = "The current mask window unit is %s" % u
1132        i = self._check_ifs()
1133        if not i:
1134            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1135        asaplog.push(msg)
1136        n = self.nchan()
1137        msk = _n_bools(n, False)
1138        # test if args is a 'list' or a 'normal *args - UGLY!!!
1139
1140        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1141             and args or args[0]
1142        for window in ws:
1143            if len(window) == 1:
1144                window = [window[0], window[0]]
1145            if len(window) == 0 or  len(window) > 2:
1146                raise ValueError("A window needs to be defined as [start(, end)]")
1147            if window[0] > window[1]:
1148                tmp = window[0]
1149                window[0] = window[1]
1150                window[1] = tmp
1151            for i in range(n):
1152                if data[i] >= window[0] and data[i] <= window[1]:
1153                    msk[i] = True
1154        if kwargs.has_key('invert'):
1155            if kwargs.get('invert'):
1156                msk = mask_not(msk)
1157        return msk
1158
1159    def get_masklist(self, mask=None, row=0):
1160        """\
1161        Compute and return a list of mask windows, [min, max].
1162
1163        Parameters:
1164
1165            mask:       channel mask, created with create_mask.
1166
1167            row:        calculate the masklist using the specified row
1168                        for unit conversions, default is row=0
1169                        only necessary if frequency varies over rows.
1170
1171        Returns:
1172
1173            [min, max], [min2, max2], ...
1174                Pairs of start/end points (inclusive) specifying
1175                the masked regions
1176
1177        """
1178        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1179            raise TypeError("The mask should be list or tuple.")
1180        if len(mask) < 2:
1181            raise TypeError("The mask should contain more than 1 element")
1182        if self.nchan() != len(mask):
1183            msg = "Number of channels in scantable != number of mask elements"
1184            raise TypeError(msg)
1185        data = self._getabcissa(row)
1186        u = self._getcoordinfo()[0]
1187        if u == "":
1188            u = "channel"
1189        msg = "The current mask window unit is %s" % u
1190        i = self._check_ifs()
1191        if not i:
1192            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1193        asaplog.push(msg)
1194        masklist=[]
1195        ist, ien = None, None
1196        ist, ien=self.get_mask_indices(mask)
1197        if ist is not None and ien is not None:
1198            for i in xrange(len(ist)):
1199                range=[data[ist[i]],data[ien[i]]]
1200                range.sort()
1201                masklist.append([range[0],range[1]])
1202        return masklist
1203
1204    def get_mask_indices(self, mask=None):
1205        """\
1206        Compute and Return lists of mask start indices and mask end indices.
1207
1208        Parameters:
1209
1210            mask:       channel mask, created with create_mask.
1211
1212        Returns:
1213
1214            List of mask start indices and that of mask end indices,
1215            i.e., [istart1,istart2,....], [iend1,iend2,....].
1216
1217        """
1218        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1219            raise TypeError("The mask should be list or tuple.")
1220        if len(mask) < 2:
1221            raise TypeError("The mask should contain more than 1 element")
1222        istart=[]
1223        iend=[]
1224        if mask[0]: istart.append(0)
1225        for i in range(len(mask)-1):
1226            if not mask[i] and mask[i+1]:
1227                istart.append(i+1)
1228            elif mask[i] and not mask[i+1]:
1229                iend.append(i)
1230        if mask[len(mask)-1]: iend.append(len(mask)-1)
1231        if len(istart) != len(iend):
1232            raise RuntimeError("Numbers of mask start != mask end.")
1233        for i in range(len(istart)):
1234            if istart[i] > iend[i]:
1235                raise RuntimeError("Mask start index > mask end index")
1236                break
1237        return istart,iend
1238
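    # A minimal example for get_masklist / get_mask_indices; assumes 'scan' is
    # an existing scantable and the channel ranges are placeholders.
    #
    #     msk = scan.create_mask([100, 200], [500, 600])
    #     print scan.get_masklist(msk)       # e.g. [[100.0, 200.0], [500.0, 600.0]]
    #     print scan.get_mask_indices(msk)   # ([100, 500], [200, 600])
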
1239#    def get_restfreqs(self):
1240#        """
1241#        Get the restfrequency(s) stored in this scantable.
1242#        The return value(s) are always of unit 'Hz'
1243#        Parameters:
1244#            none
1245#        Returns:
1246#            a list of doubles
1247#        """
1248#        return list(self._getrestfreqs())
1249
1250    def get_restfreqs(self, ids=None):
1251        """\
1252        Get the restfrequency(s) stored in this scantable.
1253        The return value(s) are always of unit 'Hz'
1254
1255        Parameters:
1256
1257            ids: (optional) a list of MOLECULE_ID for which the restfrequency(s) are to
1258                 be retrieved
1259
1260        Returns:
1261
1262            dictionary containing ids and a list of doubles for each id
1263
1264        """
1265        if ids is None:
1266            rfreqs={}
1267            idlist = self.getmolnos()
1268            for i in idlist:
1269                rfreqs[i]=list(self._getrestfreqs(i))
1270            return rfreqs
1271        else:
1272            if type(ids)==list or type(ids)==tuple:
1273                rfreqs={}
1274                for i in ids:
1275                    rfreqs[i]=list(self._getrestfreqs(i))
1276                return rfreqs
1277            else:
1278                return list(self._getrestfreqs(ids))
1279            #return list(self._getrestfreqs(ids))
1280
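    # A minimal example for get_restfreqs; assumes 'scan' is an existing
    # scantable with rest frequencies already set.
    #
    #     rf = scan.get_restfreqs()          # dict: MOLECULE_ID -> list of Hz values
    #     for molid, freqs in rf.items():
    #         print molid, freqs
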
1281    def set_restfreqs(self, freqs=None, unit='Hz'):
1282        """\
1283        Set or replace the restfrequency(s) specified.
1284        If the 'freqs' argument holds a scalar,
1285        then that rest frequency will be applied to all the selected
1286        data.  If the 'freqs' argument holds
1287        a vector, then it MUST be of equal or smaller length than
1288        the number of IFs (and the available restfrequencies will be
1289        replaced by this vector).  In this case, *all* data have
1290        the restfrequency set per IF according
1291        to the corresponding value you give in the 'freqs' vector.
1292        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1293        IF 1 gets restfreq 2e9.
1294
1295        You can also specify the frequencies via a linecatalog.
1296
1297        Parameters:
1298
1299            freqs:   list of rest frequency values or string identifiers
1300
1301            unit:    unit for rest frequency (default 'Hz')
1302
1303
1304        Example::
1305
1306            # set the given restfrequency for the all currently selected IFs
1307            scan.set_restfreqs(freqs=1.4e9)
1308            # set restfrequencies for the n IFs  (n > 1) in the order of the
1309            # list, i.e
1310            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1311            # len(list_of_restfreqs) == nIF
1312            # for nIF == 1 the following will set multiple restfrequency for
1313            # that IF
1314            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1315            # set multiple restfrequencies per IF, as a list of lists where
1316            # the outer list has nIF elements and the inner lists are arbitrary
1317            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1318
1319       *Note*:
1320
1321            To do more sophisticated restfrequency setting, e.g. on a
1322            source and IF basis, use scantable.set_selection() before using
1323            this function::
1324
1325                # provided your scantable is called scan
1326                selection = selector()
1327                selection.set_name("ORION*")
1328                selection.set_ifs([1])
1329                scan.set_selection(selection)
1330                scan.set_restfreqs(freqs=86.6e9)
1331
1332        """
1333        varlist = vars()
1334        from asap import linecatalog
1335        # simple  value
1336        if isinstance(freqs, int) or isinstance(freqs, float):
1337            self._setrestfreqs([freqs], [""], unit)
1338        # list of values
1339        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1340            # list values are scalars
1341            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1342                if len(freqs) == 1:
1343                    self._setrestfreqs(freqs, [""], unit)
1344                else:
1345                    # allow the 'old' mode of setting multiple IFs
1346                    sel = selector()
1347                    savesel = self._getselection()
1348                    iflist = self.getifnos()
1349                    if len(freqs)>len(iflist):
1350                        raise ValueError("number of elements in the list "
1351                                         "exceeds the current IF selection")
1352                    iflist = self.getifnos()
1353                    for i, fval in enumerate(freqs):
1354                        sel.set_ifs(iflist[i])
1355                        self._setselection(sel)
1356                        self._setrestfreqs([fval], [""], unit)
1357                    self._setselection(savesel)
1358
1359            # list values are dict, {'value'=, 'name'=)
1360            elif isinstance(freqs[-1], dict):
1361                values = []
1362                names = []
1363                for d in freqs:
1364                    values.append(d["value"])
1365                    names.append(d["name"])
1366                self._setrestfreqs(values, names, unit)
1367            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1368                sel = selector()
1369                savesel = self._getselection()
1370                iflist = self.getifnos()
1371                if len(freqs)>len(iflist):
1372                    raise ValueError("number of elements in list of list exceeds"
1373                                     " the current IF selections")
1374                for i, fval in enumerate(freqs):
1375                    sel.set_ifs(iflist[i])
1376                    self._setselection(sel)
1377                    self._setrestfreqs(fval, [""], unit)
1378                self._setselection(savesel)
1379        # freqs are to be taken from a linecatalog
1380        elif isinstance(freqs, linecatalog):
1381            sel = selector()
1382            savesel = self._getselection()
1383            for i in xrange(freqs.nrow()):
1384                sel.set_ifs(self.getifnos()[i])  # iflist is not defined in this branch
1385                self._setselection(sel)
1386                self._setrestfreqs([freqs.get_frequency(i)],
1387                                   [freqs.get_name(i)], "MHz")
1388                # ensure that we are not iterating past nIF
1389                if i == self.nif()-1: break
1390            self._setselection(savesel)
1391        else:
1392            return
1393        self._add_history("set_restfreqs", varlist)
1394
1395    def shift_refpix(self, delta):
1396        """\
1397        Shift the reference pixel of the Spectral Coordinate by an
1398        integer amount.
1399
1400        Parameters:
1401
1402            delta:   the amount to shift by
1403
1404        *Note*:
1405
1406            Be careful using this with broadband data.
1407
1408        """
1409        Scantable.shift_refpix(self, delta)
1410
1411    @asaplog_post_dec
1412    def history(self, filename=None):
1413        """\
1414        Print the history. Optionally to a file.
1415
1416        Parameters:
1417
1418            filename:    The name  of the file to save the history to.
1419
1420        """
1421        hist = list(self._gethistory())
1422        out = "-"*80
1423        for h in hist:
1424            if h.startswith("---"):
1425                out = "\n".join([out, h])
1426            else:
1427                items = h.split("##")
1428                date = items[0]
1429                func = items[1]
1430                items = items[2:]
1431                out += "\n"+date+"\n"
1432                out += "Function: %s\n  Parameters:" % (func)
1433                for i in items:
1434                    s = i.split("=")
1435                    out += "\n   %s = %s" % (s[0], s[1])
1436                out = "\n".join([out, "-"*80])
1437        if filename is not None:
1438            if filename == "":
1439                filename = 'scantable_history.txt'
1440            import os
1441            filename = os.path.expandvars(os.path.expanduser(filename))
1442            if not os.path.isdir(filename):
1443                data = open(filename, 'w')
1444                data.write(out)
1445                data.close()
1446            else:
1447                msg = "Illegal file name '%s'." % (filename)
1448                raise IOError(msg)
1449        return page(out)
1450    #
1451    # Maths business
1452    #
1453    @asaplog_post_dec
1454    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1455        """\
1456        Return the (time) weighted average of a scan.
1457
1458        *Note*:
1459
1460            in channels only - align if necessary
1461
1462        Parameters:
1463
1464            mask:     an optional mask (only used for 'var' and 'tsys'
1465                      weighting)
1466
1467            scanav:   True averages each scan separately
1468                      False (default) averages all scans together,
1469
1470            weight:   Weighting scheme.
1471                      'none'     (mean no weight)
1472                      'var'      (1/var(spec) weighted)
1473                      'tsys'     (1/Tsys**2 weighted)
1474                      'tint'     (integration time weighted)
1475                      'tintsys'  (Tint/Tsys**2)
1476                      'median'   ( median averaging)
1477                      The default is 'tint'
1478
1479            align:    align the spectra in velocity before averaging. It takes
1480                      the time of the first spectrum as reference time.
1481
1482        Example::
1483
1484            # time average the scantable without using a mask
1485            newscan = scan.average_time()
1486
1487        """
1488        varlist = vars()
1489        weight = weight or 'TINT'
1490        mask = mask or ()
1491        scanav = (scanav and 'SCAN') or 'NONE'
1492        scan = (self, )
1493
1494        if align:
1495            scan = (self.freq_align(insitu=False), )
1496        s = None
1497        if weight.upper() == 'MEDIAN':
1498            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1499                                                     scanav))
1500        else:
1501            s = scantable(self._math._average(scan, mask, weight.upper(),
1502                          scanav))
1503        s._add_history("average_time", varlist)
1504        return s
1505
1506    @asaplog_post_dec
1507    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1508        """\
1509        Return a scan where all spectra are converted to either
1510        Jansky or Kelvin depending upon the flux units of the scan table.
1511        By default the function tries to look the values up internally.
1512        If it can't find them (or if you want to over-ride), you must
1513        specify EITHER jyperk OR eta (and D which it will try to look up
1514        also if you don't set it). jyperk takes precedence if you set both.
1515
1516        Parameters:
1517
1518            jyperk:      the Jy / K conversion factor
1519
1520            eta:         the aperture efficiency
1521
1522            d:           the geometric diameter (metres)
1523
1524            insitu:      if False a new scantable is returned.
1525                         Otherwise, the scaling is done in-situ
1526                         The default is taken from .asaprc (False)
1527
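        Example (an illustrative sketch; the jyperk value below is made up,
        not a real telescope calibration)::

            # convert using an explicit Jy/K factor and return a new scantable
            jyscan = scan.convert_flux(jyperk=12.7, insitu=False)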
1528        """
1529        if insitu is None: insitu = rcParams['insitu']
1530        self._math._setinsitu(insitu)
1531        varlist = vars()
1532        jyperk = jyperk or -1.0
1533        d = d or -1.0
1534        eta = eta or -1.0
1535        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1536        s._add_history("convert_flux", varlist)
1537        if insitu: self._assign(s)
1538        else: return s
1539
1540    @asaplog_post_dec
1541    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1542        """\
1543        Return a scan after applying a gain-elevation correction.
1544        The correction can be made via either a polynomial or a
1545        table-based interpolation (and extrapolation if necessary).
1546        You specify polynomial coefficients, an ascii table or neither.
1547        If you specify neither, then a polynomial correction will be made
1548        with built-in coefficients known for certain telescopes (an error
1549        will occur if the instrument is not known).
1550        The data and Tsys are *divided* by the scaling factors.
1551
1552        Parameters:
1553
1554            poly:        Polynomial coefficients (default None) to compute a
1555                         gain-elevation correction as a function of
1556                         elevation (in degrees).
1557
1558            filename:    The name of an ascii file holding correction factors.
1559                         The first row of the ascii file must give the column
1560                         names and these MUST include columns
1561                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1562                         by this) somewhere.
1563                         The second row must give the data type of the
1564                         column. Use 'R' for Real and 'I' for Integer.
1565                         An example file would be
1566                         (actual factors are arbitrary) :
1567
1568                         TIME ELEVATION FACTOR
1569                         R R R
1570                         0.1 0 0.8
1571                         0.2 20 0.85
1572                         0.3 40 0.9
1573                         0.4 60 0.85
1574                         0.5 80 0.8
1575                         0.6 90 0.75
1576
1577            method:      Interpolation method when correcting from a table.
1578                         Values are  "nearest", "linear" (default), "cubic"
1579                         and "spline"
1580
1581            insitu:      if False a new scantable is returned.
1582                         Otherwise, the scaling is done in-situ
1583                         The default is taken from .asaprc (False)
1584
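        Example (an illustrative sketch; 'gainel.txt' is a hypothetical ascii
        table in the format described above)::

            # apply a table-based gain-elevation correction in-situ
            scan.gain_el(filename='gainel.txt', method='linear', insitu=True)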
1585        """
1586
1587        if insitu is None: insitu = rcParams['insitu']
1588        self._math._setinsitu(insitu)
1589        varlist = vars()
1590        poly = poly or ()
1591        from os.path import expandvars
1592        filename = expandvars(filename)
1593        s = scantable(self._math._gainel(self, poly, filename, method))
1594        s._add_history("gain_el", varlist)
1595        if insitu:
1596            self._assign(s)
1597        else:
1598            return s
1599
1600    @asaplog_post_dec
1601    def freq_align(self, reftime=None, method='cubic', insitu=None):
1602        """\
1603        Return a scan where all rows have been aligned in frequency/velocity.
1604        The alignment frequency frame (e.g. LSRK) is that set by function
1605        set_freqframe.
1606
1607        Parameters:
1608
1609            reftime:     reference time to align at. By default, the time of
1610                         the first row of data is used.
1611
1612            method:      Interpolation method for regridding the spectra.
1613                         Choose from "nearest", "linear", "cubic" (default)
1614                         and "spline"
1615
1616            insitu:      if False a new scantable is returned.
1617                         Otherwise, the scaling is done in-situ
1618                         The default is taken from .asaprc (False)
1619
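        Example (a minimal sketch)::

            # align all rows at the time of the first row, using the default
            # cubic interpolation, and return a new scantable
            aligned = scan.freq_align(insitu=False)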
1620        """
1621        if insitu is None: insitu = rcParams["insitu"]
1622        self._math._setinsitu(insitu)
1623        varlist = vars()
1624        reftime = reftime or ""
1625        s = scantable(self._math._freq_align(self, reftime, method))
1626        s._add_history("freq_align", varlist)
1627        if insitu: self._assign(s)
1628        else: return s
1629
1630    @asaplog_post_dec
1631    def opacity(self, tau=None, insitu=None):
1632        """\
1633        Apply an opacity correction. The data
1634        and Tsys are multiplied by the correction factor.
1635
1636        Parameters:
1637
1638            tau:         (list of) opacity from which the correction factor is
1639                         exp(tau*ZD)
1640                         where ZD is the zenith-distance.
1641                         If a list is provided, it has to be of length nIF,
1642                         nIF*nPol or 1 and in order of IF/POL, e.g.
1643                         [opif0pol0, opif0pol1, opif1pol0 ...]
1644                         if tau is `None` the opacities are determined from a
1645                         model.
1646
1647            insitu:      if False a new scantable is returned.
1648                         Otherwise, the scaling is done in-situ
1649                         The default is taken from .asaprc (False)
1650
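        Example (the opacity value is illustrative only)::

            # apply a single zenith opacity of 0.05 to all IFs/polarisations
            corrected = scan.opacity(tau=0.05, insitu=False)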
1651        """
1652        if insitu is None: insitu = rcParams['insitu']
1653        self._math._setinsitu(insitu)
1654        varlist = vars()
1655        if not hasattr(tau, "__len__"):
1656            tau = [tau]
1657        s = scantable(self._math._opacity(self, tau))
1658        s._add_history("opacity", varlist)
1659        if insitu: self._assign(s)
1660        else: return s
1661
1662    @asaplog_post_dec
1663    def bin(self, width=5, insitu=None):
1664        """\
1665        Return a scan where all spectra have been binned up.
1666
1667        Parameters:
1668
1669            width:       The bin width (default=5) in pixels
1670
1671            insitu:      if False a new scantable is returned.
1672                         Otherwise, the scaling is done in-situ
1673                         The default is taken from .asaprc (False)
1674
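        Example (the width is illustrative)::

            # bin the spectra up by 10 channels and return a new scantable
            binned = scan.bin(width=10, insitu=False)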
1675        """
1676        if insitu is None: insitu = rcParams['insitu']
1677        self._math._setinsitu(insitu)
1678        varlist = vars()
1679        s = scantable(self._math._bin(self, width))
1680        s._add_history("bin", varlist)
1681        if insitu:
1682            self._assign(s)
1683        else:
1684            return s
1685
1686    @asaplog_post_dec
1687    def resample(self, width=5, method='cubic', insitu=None):
1688        """\
1689        Return a scan where all spectra have been binned up.
1690
1691        Parameters:
1692
1693            width:       The bin width (default=5) in pixels
1694
1695            method:      Interpolation method when correcting from a table.
1696                         Values are  "nearest", "linear", "cubic" (default)
1697                         and "spline"
1698
1699            insitu:      if False a new scantable is returned.
1700                         Otherwise, the scaling is done in-situ
1701                         The default is taken from .asaprc (False)
1702
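        Example (the width is illustrative)::

            # resample with a 4 pixel bin width using spline interpolation
            rs = scan.resample(width=4, method='spline', insitu=False)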
1703        """
1704        if insitu is None: insitu = rcParams['insitu']
1705        self._math._setinsitu(insitu)
1706        varlist = vars()
1707        s = scantable(self._math._resample(self, method, width))
1708        s._add_history("resample", varlist)
1709        if insitu: self._assign(s)
1710        else: return s
1711
1712    @asaplog_post_dec
1713    def average_pol(self, mask=None, weight='none'):
1714        """\
1715        Average the Polarisations together.
1716
1717        Parameters:
1718
1719            mask:        An optional mask defining the region where the
1720                         averaging will be applied. The output will have all
1721                         specified points masked.
1722
1723            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1724                         weighted), or 'tsys' (1/Tsys**2 weighted)
1725
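        Example (a minimal sketch)::

            # average the polarisations with 1/Tsys**2 weighting
            pave = scan.average_pol(weight='tsys')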
1726        """
1727        varlist = vars()
1728        mask = mask or ()
1729        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1730        s._add_history("average_pol", varlist)
1731        return s
1732
1733    @asaplog_post_dec
1734    def average_beam(self, mask=None, weight='none'):
1735        """\
1736        Average the Beams together.
1737
1738        Parameters:
1739            mask:        An optional mask defining the region where the
1740                         averaging will be applied. The output will have all
1741                         specified points masked.
1742
1743            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1744                         weighted), or 'tsys' (1/Tsys**2 weighted)
1745
1746        """
1747        varlist = vars()
1748        mask = mask or ()
1749        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1750        s._add_history("average_beam", varlist)
1751        return s
1752
1753    def parallactify(self, pflag):
1754        """\
1755        Set a flag to indicate whether this data should be treated as having
1756        been 'parallactified' (total phase == 0.0)
1757
1758        Parameters:
1759
1760            pflag:  Bool indicating whether to turn this on (True) or
1761                    off (False)
1762
1763        """
1764        varlist = vars()
1765        self._parallactify(pflag)
1766        self._add_history("parallactify", varlist)
1767
1768    @asaplog_post_dec
1769    def convert_pol(self, poltype=None):
1770        """\
1771        Convert the data to a different polarisation type.
1772        Note that you will need cross-polarisation terms for most conversions.
1773
1774        Parameters:
1775
1776            poltype:    The new polarisation type. Valid types are:
1777                        "linear", "circular", "stokes" and "linpol"
1778
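        Example (a minimal sketch)::

            # convert linear polarisation data to Stokes parameters
            stokescan = scan.convert_pol('stokes')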
1779        """
1780        varlist = vars()
1781        s = scantable(self._math._convertpol(self, poltype))
1782        s._add_history("convert_pol", varlist)
1783        return s
1784
1785    @asaplog_post_dec
1786    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1787        """\
1788        Smooth the spectrum by the specified kernel (conserving flux).
1789
1790        Parameters:
1791
1792            kernel:     The type of smoothing kernel. Select from
1793                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1794                        or 'poly'
1795
1796            width:      The width of the kernel in pixels. For 'hanning' this is
1797                        ignored; otherwise it defaults to 5 pixels.
1798                        For 'gaussian' it is the Full Width Half
1799                        Maximum. For 'boxcar' it is the full width.
1800                        For 'rmedian' and 'poly' it is the half width.
1801
1802            order:      Optional parameter for 'poly' kernel (default is 2), to
1803                        specify the order of the polynomial. Ignored by all other
1804                        kernels.
1805
1806            plot:       plot the original and the smoothed spectra.
1807                        Each individual fit then has to be approved, by
1808                        typing 'y' or 'n'
1809
1810            insitu:     if False a new scantable is returned.
1811                        Otherwise, the scaling is done in-situ
1812                        The default is taken from .asaprc (False)
1813
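        Example (kernel and width are illustrative)::

            # boxcar smooth with a 7 channel wide kernel, returning a new
            # scantable
            sm = scan.smooth(kernel='boxcar', width=7, insitu=False)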
1814        """
1815        if insitu is None: insitu = rcParams['insitu']
1816        self._math._setinsitu(insitu)
1817        varlist = vars()
1818
1819        if plot: orgscan = self.copy()
1820
1821        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1822        s._add_history("smooth", varlist)
1823
1824        if plot:
1825            if rcParams['plotter.gui']:
1826                from asap.asaplotgui import asaplotgui as asaplot
1827            else:
1828                from asap.asaplot import asaplot
1829            self._p=asaplot()
1830            self._p.set_panels()
1831            ylab=s._get_ordinate_label()
1832            #self._p.palette(0,["#777777","red"])
1833            for r in xrange(s.nrow()):
1834                xsm=s._getabcissa(r)
1835                ysm=s._getspectrum(r)
1836                xorg=orgscan._getabcissa(r)
1837                yorg=orgscan._getspectrum(r)
1838                self._p.clear()
1839                self._p.hold()
1840                self._p.set_axes('ylabel',ylab)
1841                self._p.set_axes('xlabel',s._getabcissalabel(r))
1842                self._p.set_axes('title',s._getsourcename(r))
1843                self._p.set_line(label='Original',color="#777777")
1844                self._p.plot(xorg,yorg)
1845                self._p.set_line(label='Smoothed',color="red")
1846                self._p.plot(xsm,ysm)
1847                ### Ugly part for legend
1848                for i in [0,1]:
1849                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1850                self._p.release()
1851                ### Ugly part for legend
1852                self._p.subplots[0]['lines']=[]
1853                res = raw_input("Accept smoothing ([y]/n): ")
1854                if res.upper() == 'N':
1855                    s._setspectrum(yorg, r)
1856            self._p.unmap()
1857            self._p = None
1858            del orgscan
1859
1860        if insitu: self._assign(s)
1861        else: return s
1862
1863    @asaplog_post_dec
1864    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1865        """\
1866        Return a scan which has been baselined (all rows) by a polynomial.
1867       
1868        Parameters:
1869
1870            mask:       an optional mask
1871
1872            order:      the order of the polynomial (default is 0)
1873
1874            plot:       plot the fit and the residual. Each individual
1875                        fit then has to be approved, by typing 'y'
1876                        or 'n'
1877
1878            uselin:     use linear polynomial fit
1879
1880            insitu:     if False a new scantable is returned.
1881                        Otherwise, the scaling is done in-situ
1882                        The default is taken from .asaprc (False)
1883
1884            rows:       row numbers of spectra to be processed.
1885                        (default is None: for all rows)
1886       
1887        Example:
1888            # return a scan baselined by a third order polynomial,
1889            # not using a mask
1890            bscan = scan.poly_baseline(order=3)
1891
1892        """
1893        if insitu is None: insitu = rcParams['insitu']
1894        if not insitu:
1895            workscan = self.copy()
1896        else:
1897            workscan = self
1898        varlist = vars()
1899        if mask is None:
1900            mask = [True for i in xrange(self.nchan())]
1901
1902        try:
1903            f = fitter()
1904            if uselin:
1905                f.set_function(lpoly=order)
1906            else:
1907                f.set_function(poly=order)
1908
1909            if rows is None:
1910                rows = xrange(workscan.nrow())
1911            elif isinstance(rows, int):
1912                rows = [ rows ]
1913           
1914            if len(rows) > 0:
1915                self.blpars = []
1916                self.masklists = []
1917                self.actualmask = []
1918           
1919            for r in rows:
1920                f.x = workscan._getabcissa(r)
1921                f.y = workscan._getspectrum(r)
1922                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1923                f.data = None
1924                f.fit()
1925                if plot:
1926                    f.plot(residual=True)
1927                    x = raw_input("Accept fit ( [y]/n ): ")
1928                    if x.upper() == 'N':
1929                        self.blpars.append(None)
1930                        self.masklists.append(None)
1931                        self.actualmask.append(None)
1932                        continue
1933                workscan._setspectrum(f.fitter.getresidual(), r)
1934                self.blpars.append(f.get_parameters())
1935                self.masklists.append(workscan.get_masklist(f.mask, row=r))
1936                self.actualmask.append(f.mask)
1937
1938            if plot:
1939                f._p.unmap()
1940                f._p = None
1941            workscan._add_history("poly_baseline", varlist)
1942            if insitu:
1943                self._assign(workscan)
1944            else:
1945                return workscan
1946        except RuntimeError:
1947            msg = "The fit failed, possibly because it didn't converge."
1948            raise RuntimeError(msg)
1949
1950
1951    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1952        """\
1953        Return a scan which has been baselined (all rows) by a polynomial.
1954        Parameters:
1955            mask:       an optional mask
1956            order:      the order of the polynomial (default is 0)
1957            plot:       plot the fit and the residual. In this each
1958                        indivual fit has to be approved, by typing 'y'
1959                        or 'n'. Ignored if batch = True.
1960            batch:      if True a faster algorithm is used and logs
1961                        including the fit results are not output
1962                        (default is False)
1963            insitu:     if False a new scantable is returned.
1964                        Otherwise, the scaling is done in-situ
1965                        The default is taken from .asaprc (False)
1966            rows:       row numbers of spectra to be processed.
1967                        (default is None: for all rows)
1968        Example:
1969            # return a scan baselined by a third order polynomial,
1970            # not using a mask
1971            bscan = scan.poly_baseline(order=3)
1972        """
1973        if insitu is None: insitu = rcParams["insitu"]
1974        if insitu:
1975            workscan = self
1976        else:
1977            workscan = self.copy()
1978
1979        varlist = vars()
1980        nchan = workscan.nchan()
1981       
1982        if mask is None:
1983            mask = [True for i in xrange(nchan)]
1984
1985        try:
1986            if rows is None:
1987                rows = xrange(workscan.nrow())
1988            elif isinstance(rows, int):
1989                rows = [ rows ]
1990           
1991            if len(rows) > 0:
1992                self.blpars = []
1993                self.masklists = []
1994                self.actualmask = []
1995
1996            if batch:
1997                for r in rows:
1998                    workscan._poly_baseline_batch(mask, order, r)
1999            elif plot:
2000                f = fitter()
2001                f.set_function(lpoly=order)
2002                for r in rows:
2003                    f.x = workscan._getabcissa(r)
2004                    f.y = workscan._getspectrum(r)
2005                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2006                    f.data = None
2007                    f.fit()
2008                   
2009                    f.plot(residual=True)
2010                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2011                    if accept_fit.upper() == "N":
2012                        self.blpars.append(None)
2013                        self.masklists.append(None)
2014                        self.actualmask.append(None)
2015                        continue
2016                    workscan._setspectrum(f.fitter.getresidual(), r)
2017                    self.blpars.append(f.get_parameters())
2018                    self.masklists.append(workscan.get_masklist(f.mask, row=r))
2019                    self.actualmask.append(f.mask)
2020                   
2021                f._p.unmap()
2022                f._p = None
2023            else:
2024                import array
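                # Note: the loop below hands the C++ fitter the raw buffer
                # addresses and lengths (from array.buffer_info()) of the
                # parameter, error and mask arrays, so that _poly_baseline
                # can write the fit results back into them in place.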
2025                for r in rows:
2026                    pars = array.array("f", [0.0 for i in range(order+1)])
2027                    pars_adr = pars.buffer_info()[0]
2028                    pars_len = pars.buffer_info()[1]
2029                   
2030                    errs = array.array("f", [0.0 for i in range(order+1)])
2031                    errs_adr = errs.buffer_info()[0]
2032                    errs_len = errs.buffer_info()[1]
2033                   
2034                    fmsk = array.array("i", [1 for i in range(nchan)])
2035                    fmsk_adr = fmsk.buffer_info()[0]
2036                    fmsk_len = fmsk.buffer_info()[1]
2037                   
2038                    workscan._poly_baseline(mask, order, r, pars_adr, pars_len, errs_adr, errs_len, fmsk_adr, fmsk_len)
2039                   
2040                    params = pars.tolist()
2041                    fmtd = ""
2042                    for i in xrange(len(params)): fmtd += "  p%d= %3.6f," % (i, params[i])
2043                    fmtd = fmtd[:-1]  # remove trailing ","
2044                    errors = errs.tolist()
2045                    fmask = fmsk.tolist()
2046                    for i in xrange(len(fmask)): fmask[i] = (fmask[i] > 0)    # transform (1/0) -> (True/False)
2047                   
2048                    self.blpars.append({"params":params, "fixed":[], "formatted":fmtd, "errors":errors})
2049                    self.masklists.append(workscan.get_masklist(fmask, r))
2050                    self.actualmask.append(fmask)
2051                   
2052                    asaplog.push(str(fmtd))
2053           
2054            workscan._add_history("poly_baseline", varlist)
2055           
2056            if insitu:
2057                self._assign(workscan)
2058            else:
2059                return workscan
2060           
2061        except RuntimeError, e:
2062            msg = "The fit failed, possibly because it didn't converge."
2063            if rcParams["verbose"]:
2064                asaplog.push(str(e))
2065                asaplog.push(str(msg))
2066                return
2067            else:
2068                raise RuntimeError(str(e)+'\n'+msg)
2069
2070
2071    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2072                           threshold=3, chan_avg_limit=1, plot=False,
2073                           insitu=None, rows=None):
2074        """\
2075        Return a scan which has been baselined (all rows) by a polynomial.
2076        Spectral lines are detected first using linefinder and masked out
2077        to avoid them affecting the baseline solution.
2078
2079        Parameters:
2080
2081            mask:       an optional mask retrieved from scantable
2082
2083            edge:       an optional number of channels to drop at the edge of
2084                        the spectrum. If only one value is
2085                        specified, the same number will be dropped from
2086                        both sides of the spectrum. Default is to keep
2087                        all channels. Nested tuples represent individual
2088                        edge selections for different IFs (whose numbers of
2089                        spectral channels can differ)
2090
2091            order:      the order of the polynomial (default is 0)
2092
2093            threshold:  the threshold used by line finder. It is better to
2094                        keep it large as only strong lines affect the
2095                        baseline solution.
2096
2097            chan_avg_limit:
2098                        a maximum number of consecutive spectral channels to
2099                        average during the search of weak and broad lines.
2100                        The default is no averaging (and no search for weak
2101                        lines). If such lines can affect the fitted baseline
2102                        (e.g. a high order polynomial is fitted), increase this
2103                        parameter (usually values up to 8 are reasonable). Most
2104                        users of this method should find the default value
2105                        sufficient.
2106
2107            plot:       plot the fit and the residual. Each individual
2108                        fit then has to be approved, by typing 'y'
2109                        or 'n'
2110
2111            insitu:     if False a new scantable is returned.
2112                        Otherwise, the scaling is done in-situ
2113                        The default is taken from .asaprc (False)
2114            rows:       row numbers of spectra to be processed.
2115                        (default is None: for all rows)
2116
2117
2118        Example::
2119
2120            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2121
2122        """
2123        if insitu is None: insitu = rcParams['insitu']
2124        varlist = vars()
2125        from asap.asaplinefind import linefinder
2126        from asap import _is_sequence_or_number as _is_valid
2127
2128        # check whether edge is set up for each IF individually
2129        individualedge = False
2130        if len(edge) > 1:
2131            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2132                individualedge = True
2133
2134        if not _is_valid(edge, int) and not individualedge:
2135            raise ValueError, "Parameter 'edge' has to be an integer or a \
2136            pair of integers specified as a tuple. Nested tuples are allowed \
2137            to make individual selection for different IFs."
2138
2139        curedge = (0, 0)
2140        if individualedge:
2141            for edgepar in edge:
2142                if not _is_valid(edgepar, int):
2143                    raise ValueError, "Each element of the 'edge' tuple has \
2144                                       to be a pair of integers or an integer."
2145        else:
2146            curedge = edge
2147
2148        if not insitu:
2149            workscan = self.copy()
2150        else:
2151            workscan = self
2152
2153        # setup fitter
2154        f = fitter()
2155        f.set_function(lpoly=order)
2156
2157        # setup line finder
2158        fl = linefinder()
2159        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2160
2161        fl.set_scan(workscan)
2162
2163        if mask is None:
2164            mask = _n_bools(workscan.nchan(), True)
2165       
2166        if rows is None:
2167            rows = xrange(workscan.nrow())
2168        elif isinstance(rows, int):
2169            rows = [ rows ]
2170       
2171        # Save parameters of baseline fits & masklists as a class attribute.
2172        # NOTICE: It does not reflect changes in scantable!
2173        if len(rows) > 0:
2174            self.blpars=[]
2175            self.masklists=[]
2176            self.actualmask=[]
2177        asaplog.push("Processing:")
2178        for r in rows:
2179            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2180                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2181                 workscan.getpol(r), workscan.getcycle(r))
2182            asaplog.push(msg, False)
2183
2184            # figure out edge parameter
2185            if individualedge:
2186                if len(edge) <= workscan.getif(r):
2187                    raise RuntimeError, "Number of edge elements appears to " \
2188                                        "be less than the number of IFs"
2189                curedge = edge[workscan.getif(r)]
2190
2191            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2192
2193            # setup line finder
2194            fl.find_lines(r, actualmask, curedge)
2195           
2196            f.x = workscan._getabcissa(r)
2197            f.y = workscan._getspectrum(r)
2198            f.mask = fl.get_mask()
2199            f.data = None
2200            f.fit()
2201
2202            # Show mask list
2203            masklist=workscan.get_masklist(f.mask, row=r)
2204            msg = "mask range: "+str(masklist)
2205            asaplog.push(msg, False)
2206
2207            if plot:
2208                f.plot(residual=True)
2209                x = raw_input("Accept fit ( [y]/n ): ")
2210                if x.upper() == 'N':
2211                    self.blpars.append(None)
2212                    self.masklists.append(None)
2213                    self.actualmask.append(None)
2214                    continue
2215
2216            workscan._setspectrum(f.fitter.getresidual(), r)
2217            self.blpars.append(f.get_parameters())
2218            self.masklists.append(masklist)
2219            self.actualmask.append(f.mask)
2220        if plot:
2221            f._p.unmap()
2222            f._p = None
2223        workscan._add_history("auto_poly_baseline", varlist)
2224        if insitu:
2225            self._assign(workscan)
2226        else:
2227            return workscan
2228
2229    @asaplog_post_dec
2230    def rotate_linpolphase(self, angle):
2231        """\
2232        Rotate the phase of the complex polarization O=Q+iU correlation.
2233        This is always done in situ in the raw data.  So if you call this
2234        function more than once then each call rotates the phase further.
2235
2236        Parameters:
2237
2238            angle:   The angle (degrees) to rotate (add) by.
2239
2240        Example::
2241
2242            scan.rotate_linpolphase(2.3)
2243
2244        """
2245        varlist = vars()
2246        self._math._rotate_linpolphase(self, angle)
2247        self._add_history("rotate_linpolphase", varlist)
2248        return
2249
2250    @asaplog_post_dec
2251    def rotate_xyphase(self, angle):
2252        """\
2253        Rotate the phase of the XY correlation.  This is always done in situ
2254        in the data.  So if you call this function more than once
2255        then each call rotates the phase further.
2256
2257        Parameters:
2258
2259            angle:   The angle (degrees) to rotate (add) by.
2260
2261        Example::
2262
2263            scan.rotate_xyphase(2.3)
2264
2265        """
2266        varlist = vars()
2267        self._math._rotate_xyphase(self, angle)
2268        self._add_history("rotate_xyphase", varlist)
2269        return
2270
2271    @asaplog_post_dec
2272    def swap_linears(self):
2273        """\
2274        Swap the linear polarisations XX and YY, or more generally the
2275        first two polarisations, as this also works for circulars.
2276        """
2277        varlist = vars()
2278        self._math._swap_linears(self)
2279        self._add_history("swap_linears", varlist)
2280        return
2281
2282    @asaplog_post_dec
2283    def invert_phase(self):
2284        """\
2285        Invert the phase of the complex polarisation
2286        """
2287        varlist = vars()
2288        self._math._invert_phase(self)
2289        self._add_history("invert_phase", varlist)
2290        return
2291
2292    @asaplog_post_dec
2293    def add(self, offset, insitu=None):
2294        """\
2295        Return a scan where all spectra have the offset added
2296
2297        Parameters:
2298
2299            offset:      the offset
2300
2301            insitu:      if False a new scantable is returned.
2302                         Otherwise, the scaling is done in-situ
2303                         The default is taken from .asaprc (False)
2304
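        Example (the offset is illustrative)::

            # add a constant offset of 1.5 to all spectra, in a new scantable
            shifted = scan.add(1.5, insitu=False)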
2305        """
2306        if insitu is None: insitu = rcParams['insitu']
2307        self._math._setinsitu(insitu)
2308        varlist = vars()
2309        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2310        s._add_history("add", varlist)
2311        if insitu:
2312            self._assign(s)
2313        else:
2314            return s
2315
2316    @asaplog_post_dec
2317    def scale(self, factor, tsys=True, insitu=None):
2318        """\
2319
2320        Return a scan where all spectra are scaled by the given 'factor'
2321
2322        Parameters:
2323
2324            factor:      the scaling factor (float or 1D float list)
2325
2326            insitu:      if False a new scantable is returned.
2327                         Otherwise, the scaling is done in-situ
2328                         The default is taken from .asaprc (False)
2329
2330            tsys:        if True (default) then apply the operation to Tsys
2331                         as well as the data
2332
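        Example (the factor is illustrative)::

            # scale all spectra (and Tsys) by 2.0 and return a new scantable
            doubled = scan.scale(2.0, tsys=True, insitu=False)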
2333        """
2334        if insitu is None: insitu = rcParams['insitu']
2335        self._math._setinsitu(insitu)
2336        varlist = vars()
2337        s = None
2338        import numpy
2339        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2340            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2341                from asapmath import _array2dOp
2342                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2343            else:
2344                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2345        else:
2346            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2347        s._add_history("scale", varlist)
2348        if insitu:
2349            self._assign(s)
2350        else:
2351            return s
2352
2353    def set_sourcetype(self, match, matchtype="pattern",
2354                       sourcetype="reference"):
2355        """\
2356        Set the type of the source to be a source or reference scan
2357        using the provided pattern.
2358
2359        Parameters:
2360
2361            match:          a Unix style pattern, regular expression or selector
2362
2363            matchtype:      'pattern' (default) UNIX style pattern or
2364                            'regex' regular expression
2365
2366            sourcetype:     the type of the source to use (source/reference)
2367
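        Example (the pattern is illustrative)::

            # mark every scan whose source name ends in '_R' as a reference
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')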
2368        """
2369        varlist = vars()
2370        basesel = self.get_selection()
2371        stype = -1
2372        if sourcetype.lower().startswith("r"):
2373            stype = 1
2374        elif sourcetype.lower().startswith("s"):
2375            stype = 0
2376        else:
2377            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2378        if matchtype.lower().startswith("p"):
2379            matchtype = "pattern"
2380        elif matchtype.lower().startswith("r"):
2381            matchtype = "regex"
2382        else:
2383            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2384        sel = selector()
2385        if isinstance(match, selector):
2386            sel = match
2387        else:
2388            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2389        self.set_selection(basesel+sel)
2390        self._setsourcetype(stype)
2391        self.set_selection(basesel)
2392        self._add_history("set_sourcetype", varlist)
2393
2394    @asaplog_post_dec
2395    @preserve_selection
2396    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2397        """\
2398        This function allows you to build quotients automatically.
2399        It assumes the observation has the same number of
2400        "ons" and "offs".
2401
2402        Parameters:
2403
2404            preserve:       you can preserve (default) the continuum or
2405                            remove it.  The equations used are
2406
2407                            preserve: Output = Toff * (on/off) - Toff
2408
2409                            remove:   Output = Toff * (on/off) - Ton
2410
2411            mode:           the on/off detection mode
2412                            'paired' (default)
2413                            identifies 'off' scans by the
2414                            trailing '_R' (Mopra/Parkes) or
2415                            '_e'/'_w' (Tid) and matches
2416                            on/off pairs from the observing pattern
2417                            'time'
2418                            finds the closest off in time
2419
2420        .. todo:: verify argument is not implemented
2421
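        Example (a minimal sketch)::

            # build quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')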
2422        """
2423        varlist = vars()
2424        modes = ["time", "paired"]
2425        if not mode in modes:
2426            msg = "please provide valid mode. Valid modes are %s" % (modes)
2427            raise ValueError(msg)
2428        s = None
2429        if mode.lower() == "paired":
2430            sel = self.get_selection()
2431            sel.set_query("SRCTYPE==psoff")
2432            self.set_selection(sel)
2433            offs = self.copy()
2434            sel.set_query("SRCTYPE==pson")
2435            self.set_selection(sel)
2436            ons = self.copy()
2437            s = scantable(self._math._quotient(ons, offs, preserve))
2438        elif mode.lower() == "time":
2439            s = scantable(self._math._auto_quotient(self, mode, preserve))
2440        s._add_history("auto_quotient", varlist)
2441        return s
2442
2443    @asaplog_post_dec
2444    def mx_quotient(self, mask = None, weight='median', preserve=True):
2445        """\
2446        Form a quotient using "off" beams when observing in "MX" mode.
2447
2448        Parameters:
2449
2450            mask:           an optional mask to be used when weight == 'stddev'
2451
2452            weight:         How to average the off beams.  Default is 'median'.
2453
2454            preserve:       you can preserve (default) the continuum or
2455                            remove it.  The equations used are:
2456
2457                                preserve: Output = Toff * (on/off) - Toff
2458
2459                                remove:   Output = Toff * (on/off) - Ton
2460
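        Example (a minimal sketch)::

            # median-average the off beams and form the quotient
            q = scan.mx_quotient(weight='median', preserve=True)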
2461        """
2462        mask = mask or ()
2463        varlist = vars()
2464        on = scantable(self._math._mx_extract(self, 'on'))
2465        preoff = scantable(self._math._mx_extract(self, 'off'))
2466        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2467        from asapmath  import quotient
2468        q = quotient(on, off, preserve)
2469        q._add_history("mx_quotient", varlist)
2470        return q
2471
2472    @asaplog_post_dec
2473    def freq_switch(self, insitu=None):
2474        """\
2475        Apply frequency switching to the data.
2476
2477        Parameters:
2478
2479            insitu:      if False a new scantable is returned.
2480                         Otherwise, the switching is done in-situ
2481                         The default is taken from .asaprc (False)
2482
2483        """
2484        if insitu is None: insitu = rcParams['insitu']
2485        self._math._setinsitu(insitu)
2486        varlist = vars()
2487        s = scantable(self._math._freqswitch(self))
2488        s._add_history("freq_switch", varlist)
2489        if insitu:
2490            self._assign(s)
2491        else:
2492            return s
2493
2494    @asaplog_post_dec
2495    def recalc_azel(self):
2496        """Recalculate the azimuth and elevation for each position."""
2497        varlist = vars()
2498        self._recalcazel()
2499        self._add_history("recalc_azel", varlist)
2500        return
2501
2502    @asaplog_post_dec
2503    def __add__(self, other):
2504        varlist = vars()
2505        s = None
2506        if isinstance(other, scantable):
2507            s = scantable(self._math._binaryop(self, other, "ADD"))
2508        elif isinstance(other, float):
2509            s = scantable(self._math._unaryop(self, other, "ADD", False))
2510        else:
2511            raise TypeError("Other input is not a scantable or float value")
2512        s._add_history("operator +", varlist)
2513        return s
2514
2515    @asaplog_post_dec
2516    def __sub__(self, other):
2517        """
2518        implicit on all axes and on Tsys
2519        """
2520        varlist = vars()
2521        s = None
2522        if isinstance(other, scantable):
2523            s = scantable(self._math._binaryop(self, other, "SUB"))
2524        elif isinstance(other, float):
2525            s = scantable(self._math._unaryop(self, other, "SUB", False))
2526        else:
2527            raise TypeError("Other input is not a scantable or float value")
2528        s._add_history("operator -", varlist)
2529        return s
2530
2531    @asaplog_post_dec
2532    def __mul__(self, other):
2533        """
2534        implicit on all axes and on Tsys
2535        """
2536        varlist = vars()
2537        s = None
2538        if isinstance(other, scantable):
2539            s = scantable(self._math._binaryop(self, other, "MUL"))
2540        elif isinstance(other, float):
2541            s = scantable(self._math._unaryop(self, other, "MUL", False))
2542        else:
2543            raise TypeError("Other input is not a scantable or float value")
2544        s._add_history("operator *", varlist)
2545        return s
2546
2547
2548    @asaplog_post_dec
2549    def __div__(self, other):
2550        """
2551        implicit on all axes and on Tsys
2552        """
2553        varlist = vars()
2554        s = None
2555        if isinstance(other, scantable):
2556            s = scantable(self._math._binaryop(self, other, "DIV"))
2557        elif isinstance(other, float):
2558            if other == 0.0:
2559                raise ZeroDivisionError("Dividing by zero is not recommended")
2560            s = scantable(self._math._unaryop(self, other, "DIV", False))
2561        else:
2562            raise TypeError("Other input is not a scantable or float value")
2563        s._add_history("operator /", varlist)
2564        return s
2565
2566    @asaplog_post_dec
2567    def get_fit(self, row=0):
2568        """\
2569        Print or return the stored fits for a row in the scantable
2570
2571        Parameters:
2572
2573            row:    the row which the fit has been applied to.
2574
2575        """
2576        if row >= self.nrow():
2577            return
2578        from asap.asapfit import asapfit
2579        fit = asapfit(self._getfit(row))
2580        asaplog.push( '%s' %(fit) )
2581        return fit.as_dict()
2582
2583    def flag_nans(self):
2584        """\
2585        Utility function to flag NaN values in the scantable.
2586        """
2587        import numpy
2588        basesel = self.get_selection()
2589        for i in range(self.nrow()):
2590            sel = self.get_row_selector(i)
2591            self.set_selection(basesel+sel)
2592            nans = numpy.isnan(self._getspectrum(0))
2593            if numpy.any(nans):
2594                bnans = [ bool(v) for v in nans]
2595                self.flag(bnans)
2596        self.set_selection(basesel)
2597
2598    def get_row_selector(self, rowno):
2599        return selector(beams=self.getbeam(rowno),
2600                        ifs=self.getif(rowno),
2601                        pols=self.getpol(rowno),
2602                        scans=self.getscan(rowno),
2603                        cycles=self.getcycle(rowno))
2604
2605    def _add_history(self, funcname, parameters):
2606        if not rcParams['scantable.history']:
2607            return
2608        # create date
2609        sep = "##"
2610        from datetime import datetime
2611        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2612        hist = dstr+sep
2613        hist += funcname+sep#cdate+sep
2614        if parameters.has_key('self'): del parameters['self']
2615        for k, v in parameters.iteritems():
2616            if type(v) is dict:
2617                for k2, v2 in v.iteritems():
2618                    hist += k2
2619                    hist += "="
2620                    if isinstance(v2, scantable):
2621                        hist += 'scantable'
2622                    elif k2 == 'mask':
2623                        if isinstance(v2, list) or isinstance(v2, tuple):
2624                            hist += str(self._zip_mask(v2))
2625                        else:
2626                            hist += str(v2)
2627                    else:
2628                        hist += str(v2)
2629            else:
2630                hist += k
2631                hist += "="
2632                if isinstance(v, scantable):
2633                    hist += 'scantable'
2634                elif k == 'mask':
2635                    if isinstance(v, list) or isinstance(v, tuple):
2636                        hist += str(self._zip_mask(v))
2637                    else:
2638                        hist += str(v)
2639                else:
2640                    hist += str(v)
2641            hist += sep
2642        hist = hist[:-2] # remove trailing '##'
2643        self._addhistory(hist)
2644
2645
2646    def _zip_mask(self, mask):
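        # Collapse a boolean channel mask into a list of [start, stop)
        # segments, e.g. [False, True, True, False, True] -> [[1, 3], [4, 5]].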
2647        mask = list(mask)
2648        i = 0
2649        segments = []
2650        while mask[i:].count(1):
2651            i += mask[i:].index(1)
2652            if mask[i:].count(0):
2653                j = i + mask[i:].index(0)
2654            else:
2655                j = len(mask)
2656            segments.append([i, j])
2657            i = j
2658        return segments
2659
2660    def _get_ordinate_label(self):
2661        fu = "("+self.get_fluxunit()+")"
2662        import re
2663        lbl = "Intensity"
2664        if re.match(".K.", fu):
2665            lbl = "Brightness Temperature "+ fu
2666        elif re.match(".Jy.", fu):
2667            lbl = "Flux density "+ fu
2668        return lbl
2669
2670    def _check_ifs(self):
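        # Rough check that all IFs (with a non-zero number of channels) have
        # the same number of channels, by comparing the average channel count
        # with that of the first IF.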
2671        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2672        nchans = filter(lambda t: t > 0, nchans)
2673        return (sum(nchans)/len(nchans) == nchans[0])
2674
2675    @asaplog_post_dec
2676    #def _fill(self, names, unit, average, getpt, antenna):
2677    def _fill(self, names, unit, average, opts={}):
2678        first = True
2679        fullnames = []
2680        for name in names:
2681            name = os.path.expandvars(name)
2682            name = os.path.expanduser(name)
2683            if not os.path.exists(name):
2684                msg = "File '%s' does not exists" % (name)
2685                raise IOError(msg)
2686            fullnames.append(name)
2687        if average:
2688            asaplog.push('Auto averaging integrations')
2689        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2690        for name in fullnames:
2691            tbl = Scantable(stype)
2692            r = filler(tbl)
2693            rx = rcParams['scantable.reference']
2694            r.setreferenceexpr(rx)
2695            msg = "Importing %s..." % (name)
2696            asaplog.push(msg, False)
2697            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2698            r.open(name, opts)# antenna, -1, -1, getpt)
2699            r.fill()
2700            if average:
2701                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2702            if not first:
2703                tbl = self._math._merge([self, tbl])
2704            Scantable.__init__(self, tbl)
2705            r.close()
2706            del r, tbl
2707            first = False
2708            #flush log
2709        asaplog.post()
2710        if unit is not None:
2711            self.set_fluxunit(unit)
2712        if not is_casapy():
2713            self.set_freqframe(rcParams['scantable.freqframe'])
2714
2715    def __getitem__(self, key):
2716        if key < 0:
2717            key += self.nrow()
2718        if key >= self.nrow():
2719            raise IndexError("Row index out of range.")
2720        return self._getspectrum(key)
2721
2722    def __setitem__(self, key, value):
2723        if key < 0:
2724            key += self.nrow()
2725        if key >= self.nrow():
2726            raise IndexError("Row index out of range.")
2727        if not hasattr(value, "__len__") or \
2728                len(value) > self.nchan(self.getif(key)):
2729            raise ValueError("Spectrum length doesn't match.")
2730        return self._setspectrum(value, key)
2731
2732    def __len__(self):
2733        return self.nrow()
2734
2735    def __iter__(self):
2736        for i in range(len(self)):
2737            yield self[i]