source: trunk/python/scantable.py @ 2004

Last change on this file since 2004 was 2004, checked in by Takeshi Nakazato, 13 years ago

New Development: No

JIRA Issue: Yes, CAS-2718

Ready for Test: Yes

Interface Changes: No

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes/No?

Module(s): Module Names change impacts.

Description:

The MSFiller is called instead of the PKSFiller when the input data is an MS.
I have run all task regressions as well as the sdsave unit test, and all passed.

A few modifications were needed in STMath::dototalpower() and
STWriter::write().
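
For illustration only (the file names are placeholders and the usual 'from asap import scantable' import is assumed), loading an MS now routes through the msfiller, with the MS-only options passed as constructor keywords:

    from asap import scantable
    # MS input: filled by the MSFiller; 'getpt' and 'antenna' apply to MS data only
    s = scantable('mydata.ms', average=False, getpt=False, antenna=0)
    # rpfits/sdfits input: filled as before
    s2 = scantable('mydata.rpf', average=True)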


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.9 KB
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
22def preserve_selection(func):
23    @wraps_dec(func)
24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
30        return val
31    return wrap
32
33def is_scantable(filename):
34    """Is the given file a scantable?
35
36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
57
58def is_ms(filename):
59    """Is the given file a MeasurementSet?
60
61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
78   
79class scantable(Scantable):
80    """\
81        The ASAP container for scans (single-dish data).
82    """
83
84    @asaplog_post_dec
85    #def __init__(self, filename, average=None, unit=None, getpt=None,
86    #             antenna=None, parallactify=None):
87    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
88        """\
89        Create a scantable from a saved one or make a reference
90
91        Parameters:
92
93            filename:     the name of an asap table on disk
94                          or
95                          the name of a rpfits/sdfits/ms file
96                          (integrations within scans are auto averaged
97                          and the whole file is read) or
98                          [advanced] a reference to an existing scantable
99
100            average:      average all integrations within a scan on read.
101                          The default (True) is taken from .asaprc.
102
103            unit:         brightness unit; must be consistent with K or Jy.
104                          Over-rides the default selected by the filler
105                          (input rpfits/sdfits/ms) or replaces the value
106                          in existing scantables
107
108            getpt:        for MeasurementSet input data only:
109                          If True, all pointing data are filled.
110                          The default is False, which makes loading
111                          the MS data faster in some cases.
112
113            antenna:      for MeasurementSet input data only:
114                          Antenna selection: an integer (id) or string (name or id).
115
116            parallactify: Indicate that the data have been parallactified. The
117                          default is taken from the .asaprc file.
118
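        A minimal usage sketch (the file name is a placeholder)::

            s = scantable('mydata.rpf', average=True, unit='K')
            ref = scantable(s)    # [advanced] reference to an existing scantable
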
119        """
120        if average is None:
121            average = rcParams['scantable.autoaverage']
122        #if getpt is None:
123        #    getpt = True
124        #if antenna is not None:
125        #    asaplog.push("Antenna selection currently unsupported."
126        #                 "Using ''")
127        #    asaplog.post('WARN')
128        #if antenna is None:
129        #    antenna = ''
130        #elif type(antenna) == int:
131        #    antenna = '%s' % antenna
132        #elif type(antenna) == list:
133        #    tmpstr = ''
134        #    for i in range( len(antenna) ):
135        #        if type(antenna[i]) == int:
136        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
137        #        elif type(antenna[i]) == str:
138        #            tmpstr=tmpstr+antenna[i]+','
139        #        else:
140        #            raise TypeError('Bad antenna selection.')
141        #    antenna = tmpstr.rstrip(',')
142        parallactify = parallactify or rcParams['scantable.parallactify']
143        varlist = vars()
144        from asap._asap import stmath
145        self._math = stmath( rcParams['insitu'] )
146        if isinstance(filename, Scantable):
147            Scantable.__init__(self, filename)
148        else:
149            if isinstance(filename, str):
150                filename = os.path.expandvars(filename)
151                filename = os.path.expanduser(filename)
152                if not os.path.exists(filename):
153                    s = "File '%s' not found." % (filename)
154                    raise IOError(s)
155                if is_scantable(filename):
156                    ondisk = rcParams['scantable.storage'] == 'disk'
157                    Scantable.__init__(self, filename, ondisk)
158                    if unit is not None:
159                        self.set_fluxunit(unit)
160                    # do not reset to the default freqframe
161                    #self.set_freqframe(rcParams['scantable.freqframe'])
162                #elif os.path.isdir(filename) \
163                #         and not os.path.exists(filename+'/table.f1'):
164                elif is_ms(filename):
165                    # Measurement Set
166                    opts={'ms': {}}
167                    mskeys=['getpt','antenna']
168                    for key in mskeys:
169                        if key in args.keys():
170                            opts['ms'][key] = args[key]
171                    #self._fill([filename], unit, average, getpt, antenna)
172                    self._fill([filename], unit, average, opts)
173                elif os.path.isfile(filename):
174                    #self._fill([filename], unit, average, getpt, antenna)
175                    self._fill([filename], unit, average)
176                else:
177                    msg = "The given file '%s'is not a valid " \
178                          "asap table." % (filename)
179                    raise IOError(msg)
180            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
181                  and isinstance(filename[-1], str):
182                #self._fill(filename, unit, average, getpt, antenna)
183                self._fill(filename, unit, average)
184        self.parallactify(parallactify)
185        self._add_history("scantable", varlist)
186
187    @asaplog_post_dec
188    def save(self, name=None, format=None, overwrite=False):
189        """\
190        Store the scantable on disk. This can be an asap (aips++) Table,
191        SDFITS or MS2 format.
192
193        Parameters:
194
195            name:        the name of the outputfile. For format "ASCII"
196                         this is the root file name (data in 'name'.txt
197                         and header in 'name'_header.txt)
198
199            format:      an optional file format. Default is ASAP.
200                         Allowed are:
201
202                            * 'ASAP' (save as ASAP [aips++] Table),
203                            * 'SDFITS' (save as SDFITS file)
204                            * 'ASCII' (saves as ascii text file)
205                            * 'MS2' (saves as a casacore MeasurementSet V2)
206                            * 'FITS' (save as image FITS - not readable by class)
207                            * 'CLASS' (save as FITS readable by CLASS)
208
209            overwrite:   If the file should be overwritten if it exists.
210                         The default False is to return with warning
211                         without writing the output. USE WITH CARE.
212
213        Example::
214
215            scan.save('myscan.asap')
216            scan.save('myscan.sdfits', 'SDFITS')
217
218        """
219        from os import path
220        format = format or rcParams['scantable.save']
221        suffix = '.'+format.lower()
222        if name is None or name == "":
223            name = 'scantable'+suffix
224            msg = "No filename given. Using default name %s..." % name
225            asaplog.push(msg)
226        name = path.expandvars(name)
227        if path.isfile(name) or path.isdir(name):
228            if not overwrite:
229                msg = "File %s exists." % name
230                raise IOError(msg)
231        format2 = format.upper()
232        if format2 == 'ASAP':
233            self._save(name)
234##         elif format2 == 'MS2':
235##             msopt = {'ms': {'overwrite': overwrite } }
236##             from asap._asap import mswriter
237##             writer = mswriter( self )
238##             writer.write( name, msopt )
239        else:
240            from asap._asap import stwriter as stw
241            writer = stw(format2)
242            writer.write(self, name)
243        return
244
245    def copy(self):
246        """Return a copy of this scantable.
247
248        *Note*:
249
250            This makes a full (deep) copy. scan2 = scan1 makes a reference.
251
252        Example::
253
254            copiedscan = scan.copy()
255
256        """
257        sd = scantable(Scantable._copy(self))
258        return sd
259
260    def drop_scan(self, scanid=None):
261        """\
262        Return a new scantable where the specified scan number(s) has(have)
263        been dropped.
264
265        Parameters:
266
267            scanid:    a (list of) scan number(s)
268
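        Example (scan numbers are illustrative)::

            # drop scans 2 and 4, keep everything else
            cleaned = scan.drop_scan([2, 4])
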
269        """
270        from asap import _is_sequence_or_number as _is_valid
271        from asap import _to_list
272        from asap import unique
273        if not _is_valid(scanid):
274            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
275        scanid = _to_list(scanid)
276        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
277        for sid in scanid: allscans.remove(sid)
278        if len(allscans) == 0:
279            raise ValueError("Can't remove all scans")
280        sel = selector(scans=allscans)
281        return self._select_copy(sel)
282
283    def _select_copy(self, selection):
284        orig = self.get_selection()
285        self.set_selection(orig+selection)
286        cp = self.copy()
287        self.set_selection(orig)
288        return cp
289
290    def get_scan(self, scanid=None):
291        """\
292        Return a specific scan (by scanno) or collection of scans (by
293        source name) in a new scantable.
294
295        *Note*:
296
297            See scantable.drop_scan() for the inverse operation.
298
299        Parameters:
300
301            scanid:    a (list of) scanno or a source name, unix-style
302                       patterns are accepted for source name matching, e.g.
303                       '*_R' gets all 'ref' scans
304
305        Example::
306
307            # get all scans containing the source '323p459'
308            newscan = scan.get_scan('323p459')
309            # get all 'off' scans
310            refscans = scan.get_scan('*_R')
311            # get a subset of scans by scanno (as listed in scan.summary())
312            newscan = scan.get_scan([0, 2, 7, 10])
313
314        """
315        if scanid is None:
316            raise RuntimeError( 'Please specify a scan no or name to '
317                                'retrieve from the scantable' )
318        try:
319            bsel = self.get_selection()
320            sel = selector()
321            if type(scanid) is str:
322                sel.set_name(scanid)
323                return self._select_copy(sel)
324            elif type(scanid) is int:
325                sel.set_scans([scanid])
326                return self._select_copy(sel)
327            elif type(scanid) is list:
328                sel.set_scans(scanid)
329                return self._select_copy(sel)
330            else:
331                msg = "Illegal scanid type, use 'int' or 'list' if ints."
332                raise TypeError(msg)
333        except RuntimeError:
334            raise
335
336    def __str__(self):
337        return Scantable._summary(self, True)
338
339    def summary(self, filename=None):
340        """\
341        Print a summary of the contents of this scantable.
342
343        Parameters:
344
345            filename:    the name of a file to write the output to
346                         Default - no file output
347
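        Example (the output file name is a placeholder)::

            scan.summary()                         # print/page to the logger
            scan.summary('scantable_summary.txt')  # also write to a text file
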
348        """
349        info = Scantable._summary(self, True)
350        if filename is not None:
351            if filename == "":
352                filename = 'scantable_summary.txt'
353            from os.path import expandvars, isdir
354            filename = expandvars(filename)
355            if not isdir(filename):
356                data = open(filename, 'w')
357                data.write(info)
358                data.close()
359            else:
360                msg = "Illegal file name '%s'." % (filename)
361                raise IOError(msg)
362        return page(info)
363
364    def get_spectrum(self, rowno):
365        """Return the spectrum for the current row in the scantable as a list.
366
367        Parameters:
368
369             rowno:   the row number to retrieve the spectrum from
370
371        """
372        return self._getspectrum(rowno)
373
374    def get_mask(self, rowno):
375        """Return the mask for the current row in the scantable as a list.
376
377        Parameters:
378
379             rowno:   the row number to retrieve the mask from
380
381        """
382        return self._getmask(rowno)
383
384    def set_spectrum(self, spec, rowno):
385        """Set the spectrum for the current row in the scantable.
386
387        Parameters:
388
389             spec:   the new spectrum
390
391             rowno:  the row number to set the spectrum for
392
393        """
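        A rough get-modify-set sketch (the scaling factor is arbitrary)::

            spec = scan.get_spectrum(0)
            spec = [v * 2.0 for v in spec]    # length must stay scan.nchan()
            scan.set_spectrum(spec, 0)
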
394        assert(len(spec) == self.nchan())
395        return self._setspectrum(spec, rowno)
396
397    def get_coordinate(self, rowno):
398        """Return the (spectral) coordinate for a a given 'rowno'.
399
400        *Note*:
401
402            * This coordinate is only valid until a scantable method modifies
403              the frequency axis.
404            * This coordinate contains the original frequency set-up,
405              NOT the new frame. The conversions, however, are done using the
406              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
407              use scantable.freq_align first. Without it there is no closure,
408              i.e.::
409
410                  c = myscan.get_coordinate(0)
411                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
412
413        Parameters:
414
415             rowno:    the row number for the spectral coordinate
416
417        """
418        return coordinate(Scantable.get_coordinate(self, rowno))
419
420    def get_selection(self):
421        """\
422        Get the selection object currently set on this scantable.
423
424        Example::
425
426            sel = scan.get_selection()
427            sel.set_ifs(0)              # select IF 0
428            scan.set_selection(sel)     # apply modified selection
429
430        """
431        return selector(self._getselection())
432
433    def set_selection(self, selection=None, **kw):
434        """\
435        Select a subset of the data. All following operations on this scantable
436        are only applied to this selection.
437
438        Parameters:
439
440            selection:    a selector object (default unset the selection), or
441                          any combination of "pols", "ifs", "beams", "scans",
442                          "cycles", "name", "query"
443
444        Examples::
445
446            sel = selector()         # create a selection object
447            sel.set_scans([0, 3])    # select SCANNO 0 and 3
448            scan.set_selection(sel)  # set the selection
449            scan.summary()           # will only print summary of scanno 0 and 3
450            scan.set_selection()     # unset the selection
451            # or the equivalent
452            scan.set_selection(scans=[0,3])
453            scan.summary()           # will only print summary of scanno 0 and 3
454            scan.set_selection()     # unset the selection
455
456        """
457        if selection is None:
458            # reset
459            if len(kw) == 0:
460                selection = selector()
461            else:
462                # try keywords
463                for k in kw:
464                    if k not in selector.fields:
465                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
466                selection = selector(**kw)
467        self._setselection(selection)
468
469    def get_row(self, row=0, insitu=None):
470        """\
471        Select a row in the scantable.
472        Return a scantable with single row.
473
474        Parameters:
475
476            row:    row no of integration, default is 0.
477            insitu: if False a new scantable is returned. Otherwise, the
478                    selection is done in-situ. The default is taken from .asaprc
479                    (False)
480
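        Example::

            # extract integration 10 as a new single-row scantable
            onerow = scan.get_row(10, insitu=False)
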
481        """
482        if insitu is None: insitu = rcParams['insitu']
483        if not insitu:
484            workscan = self.copy()
485        else:
486            workscan = self
487        # Select a row
488        sel=selector()
489        sel.set_rows([row])
490        #sel.set_scans([workscan.getscan(row)])
491        #sel.set_cycles([workscan.getcycle(row)])
492        #sel.set_beams([workscan.getbeam(row)])
493        #sel.set_ifs([workscan.getif(row)])
494        #sel.set_polarisations([workscan.getpol(row)])
495        #sel.set_name(workscan._getsourcename(row))
496        workscan.set_selection(sel)
497        if not workscan.nrow() == 1:
498            msg = "Cloud not identify single row. %d rows selected."%(workscan.nrow())
499            raise RuntimeError(msg)
500        del sel
501        if insitu:
502            self._assign(workscan)
503        else:
504            return workscan
505
506    @asaplog_post_dec
507    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
508        """\
509        Determine the specified statistic of the current beam/if/pol
510        Takes a 'mask' as an optional parameter to specify which
511        channels should be excluded.
512
513        Parameters:
514
515            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
516                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
517
518            mask:    an optional mask specifying where the statistic
519                     should be determined.
520
521            form:    format string to print statistic values
522
523            row:     row number of spectrum to process.
524                     (default is None: for all rows)
525
526        Example::

527            scan.set_unit('channel')
528            msk = scan.create_mask([100, 200], [500, 600])
529            scan.stats(stat='mean', mask=msk)
530
531        """
532        mask = mask or []
533        if not self._check_ifs():
534            raise ValueError("Cannot apply mask as the IFs have different "
535                             "number of channels. Please use setselection() "
536                             "to select individual IFs")
537        rtnabc = False
538        if stat.lower().endswith('_abc'): rtnabc = True
539        getchan = False
540        if stat.lower().startswith('min') or stat.lower().startswith('max'):
541            chan = self._math._minmaxchan(self, mask, stat)
542            getchan = True
543            statvals = []
544        if not rtnabc:
545            if row == None:
546                statvals = self._math._stats(self, mask, stat)
547            else:
548                statvals = self._math._statsrow(self, mask, stat, int(row))
549
550        #def cb(i):
551        #    return statvals[i]
552
553        #return self._row_callback(cb, stat)
554
555        label=stat
556        #callback=cb
557        out = ""
558        #outvec = []
559        sep = '-'*50
560
561        if row == None:
562            rows = xrange(self.nrow())
563        elif isinstance(row, int):
564            rows = [ row ]
565
566        for i in rows:
567            refstr = ''
568            statunit= ''
569            if getchan:
570                qx, qy = self.chan2data(rowno=i, chan=chan[i])
571                if rtnabc:
572                    statvals.append(qx['value'])
573                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
574                    statunit= '['+qx['unit']+']'
575                else:
576                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
577
578            tm = self._gettime(i)
579            src = self._getsourcename(i)
580            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
581            out += 'Time[%s]:\n' % (tm)
582            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
583            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
584            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
585            #outvec.append(callback(i))
586            if len(rows) > 1:
587                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
588                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
589            else:
590                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
591                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
592            out +=  sep+"\n"
593
594        import os
595        if os.environ.has_key( 'USER' ):
596            usr = os.environ['USER']
597        else:
598            import commands
599            usr = commands.getoutput( 'whoami' )
600        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
601        f = open(tmpfile,'w')
602        print >> f, sep
603        print >> f, ' %s %s' % (label, statunit)
604        print >> f, sep
605        print >> f, out
606        f.close()
607        f = open(tmpfile,'r')
608        x = f.readlines()
609        f.close()
610        asaplog.push(''.join(x), False)
611
612        return statvals
613
614    def chan2data(self, rowno=0, chan=0):
615        """\
616        Returns channel/frequency/velocity and spectral value
617        at an arbitrary row and channel in the scantable.
618
619        Parameters:
620
621            rowno:   a row number in the scantable. Default is the
622                     first row, i.e. rowno=0
623
624            chan:    a channel in the scantable. Default is the first
625                     channel, i.e. chan=0
626
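        Example (row and channel values are illustrative)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            print qx['value'], qx['unit']    # abcissa value at channel 100
            print qy['value'], qy['unit']    # spectral value at channel 100
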
627        """
628        if isinstance(rowno, int) and isinstance(chan, int):
629            qx = {'unit': self.get_unit(),
630                  'value': self._getabcissa(rowno)[chan]}
631            qy = {'unit': self.get_fluxunit(),
632                  'value': self._getspectrum(rowno)[chan]}
633            return qx, qy
634
635    def stddev(self, mask=None):
636        """\
637        Determine the standard deviation of the current beam/if/pol
638        Takes a 'mask' as an optional parameter to specify which
639        channels should be excluded.
640
641        Parameters:
642
643            mask:    an optional mask specifying where the standard
644                     deviation should be determined.
645
646        Example::
647
648            scan.set_unit('channel')
649            msk = scan.create_mask([100, 200], [500, 600])
650            scan.stddev(mask=msk)
651
652        """
653        return self.stats(stat='stddev', mask=mask)
654
655
656    def get_column_names(self):
657        """\
658        Return a  list of column names, which can be used for selection.
659        """
660        return list(Scantable.get_column_names(self))
661
662    def get_tsys(self, row=-1):
663        """\
664        Return the System temperatures.
665
666        Parameters:
667
668            row:    the rowno to get the information for. (default all rows)
669
670        Returns:
671
672            a list of Tsys values for the current selection
673
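        Example (the row number is illustrative)::

            tsys_all = scan.get_tsys()     # values for all rows
            tsys_row = scan.get_tsys(3)    # value for row 3 only
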
674        """
675        if row > -1:
676            return self._get_column(self._gettsys, row)
677        return self._row_callback(self._gettsys, "Tsys")
678
679
680    def get_weather(self, row=-1):
681        """\
682        Return the weather information.
683
684        Parameters:
685
686            row:    the rowno to get the information for. (default all rows)
687
688        Returns:
689
690            a dict or a list of dicts of values for the current selection
691
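        A small access sketch (the row number is illustrative)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure'], w['humidity']
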
692        """
693
694        values = self._get_column(self._get_weather, row)
695        if row > -1:
696            return {'temperature': values[0],
697                    'pressure': values[1], 'humidity' : values[2],
698                    'windspeed' : values[3], 'windaz' : values[4]
699                    }
700        else:
701            out = []
702            for r in values:
703
704                out.append({'temperature': r[0],
705                            'pressure': r[1], 'humidity' : r[2],
706                            'windspeed' : r[3], 'windaz' : r[4]
707                    })
708            return out
709
710    def _row_callback(self, callback, label):
711        out = ""
712        outvec = []
713        sep = '-'*50
714        for i in range(self.nrow()):
715            tm = self._gettime(i)
716            src = self._getsourcename(i)
717            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
718            out += 'Time[%s]:\n' % (tm)
719            if self.nbeam(-1) > 1:
720                out +=  ' Beam[%d] ' % (self.getbeam(i))
721            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
722            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
723            outvec.append(callback(i))
724            out += '= %3.3f\n' % (outvec[i])
725            out +=  sep+'\n'
726
727        asaplog.push(sep)
728        asaplog.push(" %s" % (label))
729        asaplog.push(sep)
730        asaplog.push(out)
731        asaplog.post()
732        return outvec
733
734    def _get_column(self, callback, row=-1, *args):
735        """
736        """
737        if row == -1:
738            return [callback(i, *args) for i in range(self.nrow())]
739        else:
740            if  0 <= row < self.nrow():
741                return callback(row, *args)
742
743
744    def get_time(self, row=-1, asdatetime=False, prec=-1):
745        """\
746        Get a list of time stamps for the observations.
747        Return a datetime object or a string (default) for each
748        integration time stamp in the scantable.
749
750        Parameters:
751
752            row:          row no of integration. Default -1 return all rows
753
754            asdatetime:   return values as datetime objects rather than strings
755
756            prec:         number of digits shown. Default -1 for automatic calculation.
757                          Note this number is equal to the digits of MVTime,
758                          i.e., 0<prec<3: dates with hh:: only,
759                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
760                          and >6: with hh:mm:ss.tt... (prec-6 t's added)
761
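        Example::

            stamps = scan.get_time()                       # strings, all rows
            t0 = scan.get_time(row=0, asdatetime=True)     # datetime object for row 0
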
762        """
763        from datetime import datetime
764        if prec < 0:
765            # automagically set necessary precision +1
766            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
767            prec = max(6, int(prec))
768        else:
769            prec = max(0, prec)
770        if asdatetime:
771            #precision can be 1 millisecond at max
772            prec = min(12, prec)
773
774        times = self._get_column(self._gettime, row, prec)
775        if not asdatetime:
776            return times
777        format = "%Y/%m/%d/%H:%M:%S.%f"
778        if prec < 7:
779            nsub = 1 + (((6-prec)/2) % 3)
780            substr = [".%f","%S","%M"]
781            for i in range(nsub):
782                format = format.replace(substr[i],"")
783        if isinstance(times, list):
784            return [datetime.strptime(i, format) for i in times]
785        else:
786            return datetime.strptime(times, format)
787
788
789    def get_inttime(self, row=-1):
790        """\
791        Get a list of integration times for the observations.
792        Return a time in seconds for each integration in the scantable.
793
794        Parameters:
795
796            row:    row no of integration. Default -1 return all rows.
797
798        """
799        return self._get_column(self._getinttime, row)
800
801
802    def get_sourcename(self, row=-1):
803        """\
804        Get a list of source names for the observations.
805        Return a string for each integration in the scantable.

806        Parameters:
807
808            row:    row no of integration. Default -1 return all rows.
809
810        """
811        return self._get_column(self._getsourcename, row)
812
813    def get_elevation(self, row=-1):
814        """\
815        Get a list of elevations for the observations.
816        Return a float for each integration in the scantable.
817
818        Parameters:
819
820            row:    row no of integration. Default -1 return all rows.
821
822        """
823        return self._get_column(self._getelevation, row)
824
825    def get_azimuth(self, row=-1):
826        """\
827        Get a list of azimuths for the observations.
828        Return a float for each integration in the scantable.
829
830        Parameters:
831            row:    row no of integration. Default -1 return all rows.
832
833        """
834        return self._get_column(self._getazimuth, row)
835
836    def get_parangle(self, row=-1):
837        """\
838        Get a list of parallactic angles for the observations.
839        Return a float for each integration in the scantable.
840
841        Parameters:
842
843            row:    row no of integration. Default -1 return all rows.
844
845        """
846        return self._get_column(self._getparangle, row)
847
848    def get_direction(self, row=-1):
849        """
850        Get a list of Positions on the sky (direction) for the observations.
851        Return a string for each integration in the scantable.
852
853        Parameters:
854
855            row:    row no of integration. Default -1 return all rows
856
857        """
858        return self._get_column(self._getdirection, row)
859
860    def get_directionval(self, row=-1):
861        """\
862        Get a list of Positions on the sky (direction) for the observations.
863        Return a float for each integration in the scantable.
864
865        Parameters:
866
867            row:    row no of integration. Default -1 return all rows
868
869        """
870        return self._get_column(self._getdirectionvec, row)
871
872    @asaplog_post_dec
873    def set_unit(self, unit='channel'):
874        """\
875        Set the unit for all following operations on this scantable
876
877        Parameters:
878
879            unit:    optional unit, default is 'channel'. Use one of '*Hz',
880                     'km/s', 'channel' or equivalent ''
881
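        Example::

            scan.set_unit('km/s')    # following operations use velocity
            scan.set_unit('GHz')     # or frequency
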
882        """
883        varlist = vars()
884        if unit in ['', 'pixel', 'channel']:
885            unit = ''
886        inf = list(self._getcoordinfo())
887        inf[0] = unit
888        self._setcoordinfo(inf)
889        self._add_history("set_unit", varlist)
890
891    @asaplog_post_dec
892    def set_instrument(self, instr):
893        """\
894        Set the instrument for subsequent processing.
895
896        Parameters:
897
898            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
899                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
900
901        """
902        self._setInstrument(instr)
903        self._add_history("set_instument", vars())
904
905    @asaplog_post_dec
906    def set_feedtype(self, feedtype):
907        """\
908        Overwrite the feed type, which might not be set correctly.
909
910        Parameters:
911
912            feedtype:     'linear' or 'circular'
913
914        """
915        self._setfeedtype(feedtype)
916        self._add_history("set_feedtype", vars())
917
918    @asaplog_post_dec
919    def set_doppler(self, doppler='RADIO'):
920        """\
921        Set the doppler for all following operations on this scantable.
922
923        Parameters:
924
925            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
926
927        """
928        varlist = vars()
929        inf = list(self._getcoordinfo())
930        inf[2] = doppler
931        self._setcoordinfo(inf)
932        self._add_history("set_doppler", vars())
933
934    @asaplog_post_dec
935    def set_freqframe(self, frame=None):
936        """\
937        Set the frame type of the Spectral Axis.
938
939        Parameters:
940
941            frame:   an optional frame type, default 'LSRK'. Valid frames are:
942                     'TOPO', 'LSRD', 'LSRK', 'BARY',
943                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
944
945        Example::
946
947            scan.set_freqframe('BARY')
948
949        """
950        frame = frame or rcParams['scantable.freqframe']
951        varlist = vars()
952        # "REST" is not implemented in casacore
953        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
954        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
955        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
956                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
957
958        if frame in valid:
959            inf = list(self._getcoordinfo())
960            inf[1] = frame
961            self._setcoordinfo(inf)
962            self._add_history("set_freqframe", varlist)
963        else:
964            msg  = "Please specify a valid freq type. Valid types are:\n", valid
965            raise TypeError(msg)
966
967    @asaplog_post_dec
968    def set_dirframe(self, frame=""):
969        """\
970        Set the frame type of the Direction on the sky.
971
972        Parameters:
973
974            frame:   an optional frame type, default ''. Valid frames are:
975                     'J2000', 'B1950', 'GALACTIC'
976
977        Example:
978
979            scan.set_dirframe('GALACTIC')
980
981        """
982        varlist = vars()
983        Scantable.set_dirframe(self, frame)
984        self._add_history("set_dirframe", varlist)
985
986    def get_unit(self):
987        """\
988        Get the default unit set in this scantable
989
990        Returns:
991
992            A unit string
993
994        """
995        inf = self._getcoordinfo()
996        unit = inf[0]
997        if unit == '': unit = 'channel'
998        return unit
999
1000    @asaplog_post_dec
1001    def get_abcissa(self, rowno=0):
1002        """\
1003        Get the abcissa in the current coordinate setup for the currently
1004        selected Beam/IF/Pol
1005
1006        Parameters:
1007
1008            rowno:    an optional row number in the scantable. Default is the
1009                      first row, i.e. rowno=0
1010
1011        Returns:
1012
1013            The abcissa values and the format string (as a dictionary)
1014
1015        """
1016        abc = self._getabcissa(rowno)
1017        lbl = self._getabcissalabel(rowno)
1018        return abc, lbl
1019
1020    @asaplog_post_dec
1021    def flag(self, row=-1, mask=None, unflag=False):
1022        """\
1023        Flag the selected data using an optional channel mask.
1024
1025        Parameters:
1026
1027            row:    an optional row number in the scantable.
1028                      Default -1 flags all rows
1029                     
1030            mask:   an optional channel mask, created with create_mask. Default
1031                    (no mask) is all channels.
1032
1033            unflag:    if True, unflag the data
1034
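        Example (channel ranges are illustrative)::

            msk = scan.create_mask([0, 100], [900, 1023])
            scan.flag(mask=msk)                  # flag those channels in all rows
            scan.flag(mask=msk, unflag=True)     # undo the flagging
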
1035        """
1036        varlist = vars()
1037        mask = mask or []
1038        self._flag(row, mask, unflag)
1039        self._add_history("flag", varlist)
1040
1041    @asaplog_post_dec
1042    def flag_row(self, rows=[], unflag=False):
1043        """\
1044        Flag the selected data in row-based manner.
1045
1046        Parameters:
1047
1048            rows:   list of row numbers to be flagged. Default is no row
1049                    (must be explicitly specified to execute row-based flagging).
1050
1051            unflag: if True, unflag the data.
1052
1053        """
1054        varlist = vars()
1055        self._flag_row(rows, unflag)
1056        self._add_history("flag_row", varlist)
1057
1058    @asaplog_post_dec
1059    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1060        """\
1061        Flag the selected data outside a specified range (channel-based).
1062
1063        Parameters:
1064
1065            uthres:      upper threshold.
1066
1067            dthres:      lower threshold
1068
1069            clipoutside: True for flagging data outside the range [dthres:uthres].
1070                         False for flagging data inside the range.
1071
1072            unflag:      if True, unflag the data.
1073
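        Example (thresholds are illustrative)::

            # flag everything outside the range [-0.5, 5.0]
            scan.clip(uthres=5.0, dthres=-0.5)
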
1074        """
1075        varlist = vars()
1076        self._clip(uthres, dthres, clipoutside, unflag)
1077        self._add_history("clip", varlist)
1078
1079    @asaplog_post_dec
1080    def lag_flag(self, start, end, unit="MHz", insitu=None):
1081        """\
1082        Flag the data in 'lag' space by providing a frequency to remove.
1083        Flagged data in the scantable gets interpolated over the region.
1084        No taper is applied.
1085
1086        Parameters:
1087
1088            start:    the start frequency (really a period within the
1089                      bandwidth)  or period to remove
1090
1091            end:      the end frequency or period to remove
1092
1093            unit:     the frequency unit (default "MHz") or "" for
1094                      explicit lag channels
1095
1096        *Notes*:
1097
1098            It is recommended to flag edges of the band or strong
1099            signals beforehand.
1100
1101        """
1102        if insitu is None: insitu = rcParams['insitu']
1103        self._math._setinsitu(insitu)
1104        varlist = vars()
1105        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1106        if not (unit == "" or base.has_key(unit)):
1107            raise ValueError("%s is not a valid unit." % unit)
1108        if unit == "":
1109            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1110        else:
1111            s = scantable(self._math._lag_flag(self, start*base[unit],
1112                                               end*base[unit], "frequency"))
1113        s._add_history("lag_flag", varlist)
1114        if insitu:
1115            self._assign(s)
1116        else:
1117            return s
1118
1119    @asaplog_post_dec
1120    def create_mask(self, *args, **kwargs):
1121        """\
1122        Compute and return a mask based on [min, max] windows.
1123        The specified windows are to be INCLUDED, when the mask is
1124        applied.
1125
1126        Parameters:
1127
1128            [min, max], [min2, max2], ...
1129                Pairs of start/end points (inclusive) specifying the regions
1130                to be masked
1131
1132            invert:     optional argument. If specified as True,
1133                        return an inverted mask, i.e. the regions
1134                        specified are EXCLUDED
1135
1136            row:        create the mask using the specified row for
1137                        unit conversions, default is row=0
1138                        only necessary if frequency varies over rows.
1139
1140        Examples::
1141
1142            scan.set_unit('channel')
1143            # a)
1144            msk = scan.create_mask([400, 500], [800, 900])
1145            # masks everything outside 400 and 500
1146            # and 800 and 900 in the unit 'channel'
1147
1148            # b)
1149            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1150            # masks the regions between 400 and 500
1151            # and 800 and 900 in the unit 'channel'
1152
1153            # c)
1154            #mask only channel 400
1155            msk =  scan.create_mask([400])
1156
1157        """
1158        row = kwargs.get("row", 0)
1159        data = self._getabcissa(row)
1160        u = self._getcoordinfo()[0]
1161        if u == "":
1162            u = "channel"
1163        msg = "The current mask window unit is %s" % u
1164        i = self._check_ifs()
1165        if not i:
1166            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1167        asaplog.push(msg)
1168        n = self.nchan()
1169        msk = _n_bools(n, False)
1170        # test if args is a 'list' or a 'normal *args - UGLY!!!
1171
1172        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1173             and args or args[0]
1174        for window in ws:
1175            if len(window) == 1:
1176                window = [window[0], window[0]]
1177            if len(window) == 0 or  len(window) > 2:
1178                raise ValueError("A window needs to be defined as [start(, end)]")
1179            if window[0] > window[1]:
1180                tmp = window[0]
1181                window[0] = window[1]
1182                window[1] = tmp
1183            for i in range(n):
1184                if data[i] >= window[0] and data[i] <= window[1]:
1185                    msk[i] = True
1186        if kwargs.has_key('invert'):
1187            if kwargs.get('invert'):
1188                msk = mask_not(msk)
1189        return msk
1190
1191    def get_masklist(self, mask=None, row=0, silent=False):
1192        """\
1193        Compute and return a list of mask windows, [min, max].
1194
1195        Parameters:
1196
1197            mask:       channel mask, created with create_mask.
1198
1199            row:        calculate the masklist using the specified row
1200                        for unit conversions, default is row=0
1201                        only necessary if frequency varies over rows.
1202
1203        Returns:
1204
1205            [min, max], [min2, max2], ...
1206                Pairs of start/end points (inclusive) specifying
1207                the masked regions
1208
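        Example (channel values are illustrative)::

            scan.set_unit('channel')
            msk = scan.create_mask([400, 500])
            scan.get_masklist(msk)     # -> [[400.0, 500.0]]
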
1209        """
1210        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1211            raise TypeError("The mask should be list or tuple.")
1212        if len(mask) < 2:
1213            raise TypeError("The mask elements should be > 1")
1214        if self.nchan() != len(mask):
1215            msg = "Number of channels in scantable != number of mask elements"
1216            raise TypeError(msg)
1217        data = self._getabcissa(row)
1218        u = self._getcoordinfo()[0]
1219        if u == "":
1220            u = "channel"
1221        msg = "The current mask window unit is %s" % u
1222        i = self._check_ifs()
1223        if not i:
1224            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1225        if not silent:
1226            asaplog.push(msg)
1227        masklist=[]
1228        ist, ien = None, None
1229        ist, ien=self.get_mask_indices(mask)
1230        if ist is not None and ien is not None:
1231            for i in xrange(len(ist)):
1232                range=[data[ist[i]],data[ien[i]]]
1233                range.sort()
1234                masklist.append([range[0],range[1]])
1235        return masklist
1236
1237    def get_mask_indices(self, mask=None):
1238        """\
1239        Compute and Return lists of mask start indices and mask end indices.
1240
1241        Parameters:
1242
1243            mask:       channel mask, created with create_mask.
1244
1245        Returns:
1246
1247            List of mask start indices and that of mask end indices,
1248            i.e., [istart1,istart2,....], [iend1,iend2,....].
1249
1250        """
1251        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1252            raise TypeError("The mask should be list or tuple.")
1253        if len(mask) < 2:
1254            raise TypeError("The mask elements should be > 1")
1255        istart=[]
1256        iend=[]
1257        if mask[0]: istart.append(0)
1258        for i in range(len(mask)-1):
1259            if not mask[i] and mask[i+1]:
1260                istart.append(i+1)
1261            elif mask[i] and not mask[i+1]:
1262                iend.append(i)
1263        if mask[len(mask)-1]: iend.append(len(mask)-1)
1264        if len(istart) != len(iend):
1265            raise RuntimeError("Numbers of mask start != mask end.")
1266        for i in range(len(istart)):
1267            if istart[i] > iend[i]:
1268                raise RuntimeError("Mask start index > mask end index")
1269                break
1270        return istart,iend
1271
1272#    def get_restfreqs(self):
1273#        """
1274#        Get the restfrequency(s) stored in this scantable.
1275#        The return value(s) are always of unit 'Hz'
1276#        Parameters:
1277#            none
1278#        Returns:
1279#            a list of doubles
1280#        """
1281#        return list(self._getrestfreqs())
1282
1283    def get_restfreqs(self, ids=None):
1284        """\
1285        Get the restfrequency(s) stored in this scantable.
1286        The return value(s) are always of unit 'Hz'
1287
1288        Parameters:
1289
1290            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1291                 be retrieved
1292
1293        Returns:
1294
1295            dictionary containing ids and a list of doubles for each id
1296
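        Example::

            rfs = scan.get_restfreqs()         # {molecule_id: [freq_in_Hz, ...]}
            rf0 = scan.get_restfreqs(ids=0)    # list of frequencies for one id
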
1297        """
1298        if ids is None:
1299            rfreqs={}
1300            idlist = self.getmolnos()
1301            for i in idlist:
1302                rfreqs[i]=list(self._getrestfreqs(i))
1303            return rfreqs
1304        else:
1305            if type(ids)==list or type(ids)==tuple:
1306                rfreqs={}
1307                for i in ids:
1308                    rfreqs[i]=list(self._getrestfreqs(i))
1309                return rfreqs
1310            else:
1311                return list(self._getrestfreqs(ids))
1312            #return list(self._getrestfreqs(ids))
1313
1314    def set_restfreqs(self, freqs=None, unit='Hz'):
1315        """\
1316        Set or replace the rest frequency.
1317        If the 'freqs' argument holds a scalar,
1318        that rest frequency will be applied to all the selected
1319        data.  If the 'freqs' argument holds
1320        a vector, it MUST be of equal or smaller length than
1321        the number of IFs (and the available restfrequencies will be
1322        replaced by this vector).  In this case, *all* data have
1323        the restfrequency set per IF according
1324        to the corresponding value you give in the 'freqs' vector.
1325        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1326        IF 1 gets restfreq 2e9.
1327
1328        You can also specify the frequencies via a linecatalog.
1329
1330        Parameters:
1331
1332            freqs:   list of rest frequency values or string identifiers
1333
1334            unit:    unit for rest frequency (default 'Hz')
1335
1336
1337        Example::
1338
1339            # set the given restfrequency for the all currently selected IFs
1340            scan.set_restfreqs(freqs=1.4e9)
1341            # set restfrequencies for the n IFs  (n > 1) in the order of the
1342            # list, i.e
1343            # IF0 -> 1.4e9, IF1 ->  1.41e9, IF3 -> 1.42e9
1344            # len(list_of_restfreqs) == nIF
1345            # for nIF == 1 the following will set multiple restfrequency for
1346            # that IF
1347            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1348            # set multiple restfrequencies per IF. as a list of lists where
1349            # the outer list has nIF elements, the inner s arbitrary
1350            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1351
1352        *Note*:
1353
1354            To do more sophisticated rest-frequency setting, e.g. on a
1355            source and IF basis, use scantable.set_selection() before using
1356            this function::
1357
1358                # provided your scantable is called scan
1359                selection = selector()
1360                selection.set_name("ORION*")
1361                selection.set_ifs([1])
1362                scan.set_selection(selection)
1363                scan.set_restfreqs(freqs=86.6e9)
1364
1365        """
1366        varlist = vars()
1367        from asap import linecatalog
1368        # simple  value
1369        if isinstance(freqs, int) or isinstance(freqs, float):
1370            self._setrestfreqs([freqs], [""], unit)
1371        # list of values
1372        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1373            # list values are scalars
1374            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1375                if len(freqs) == 1:
1376                    self._setrestfreqs(freqs, [""], unit)
1377                else:
1378                    # allow the 'old' mode of setting multiple IFs
1379                    sel = selector()
1380                    savesel = self._getselection()
1381                    iflist = self.getifnos()
1382                    if len(freqs)>len(iflist):
1383                        raise ValueError("number of elements in list of list "
1384                                         "exeeds the current IF selections")
1385                    iflist = self.getifnos()
1386                    for i, fval in enumerate(freqs):
1387                        sel.set_ifs(iflist[i])
1388                        self._setselection(sel)
1389                        self._setrestfreqs([fval], [""], unit)
1390                    self._setselection(savesel)
1391
1392            # list values are dict, {'value'=, 'name'=)
1393            elif isinstance(freqs[-1], dict):
1394                values = []
1395                names = []
1396                for d in freqs:
1397                    values.append(d["value"])
1398                    names.append(d["name"])
1399                self._setrestfreqs(values, names, unit)
1400            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1401                sel = selector()
1402                savesel = self._getselection()
1403                iflist = self.getifnos()
1404                if len(freqs)>len(iflist):
1405                    raise ValueError("number of elements in list of list exeeds"
1406                                     " the current IF selections")
1407                for i, fval in enumerate(freqs):
1408                    sel.set_ifs(iflist[i])
1409                    self._setselection(sel)
1410                    self._setrestfreqs(fval, [""], unit)
1411                self._setselection(savesel)
1412        # freqs are to be taken from a linecatalog
1413        elif isinstance(freqs, linecatalog):
1414            sel = selector()
1415            savesel = self._getselection()
1416            for i in xrange(freqs.nrow()):
1417                sel.set_ifs(self.getifnos()[i])
1418                self._setselection(sel)
1419                self._setrestfreqs([freqs.get_frequency(i)],
1420                                   [freqs.get_name(i)], "MHz")
1421                # ensure that we are not iterating past nIF
1422                if i == self.nif()-1: break
1423            self._setselection(savesel)
1424        else:
1425            return
1426        self._add_history("set_restfreqs", varlist)
1427
1428    def shift_refpix(self, delta):
1429        """\
1430        Shift the reference pixel of the Spectra Coordinate by an
1431        integer amount.
1432
1433        Parameters:
1434
1435            delta:   the amount to shift by
1436
1437        *Note*:
1438
1439            Be careful using this with broadband data.
1440
1441        """
1442        Scantable.shift_refpix(self, delta)
1443
1444    @asaplog_post_dec
1445    def history(self, filename=None):
1446        """\
1447        Print the history. Optionally to a file.
1448
1449        Parameters:
1450
1451            filename:    The name of the file to save the history to.
1452
1453        """
1454        hist = list(self._gethistory())
1455        out = "-"*80
1456        for h in hist:
1457            if h.startswith("---"):
1458                out = "\n".join([out, h])
1459            else:
1460                items = h.split("##")
1461                date = items[0]
1462                func = items[1]
1463                items = items[2:]
1464                out += "\n"+date+"\n"
1465                out += "Function: %s\n  Parameters:" % (func)
1466                for i in items:
1467                    if i == '':
1468                        continue
1469                    s = i.split("=")
1470                    out += "\n   %s = %s" % (s[0], s[1])
1471                out = "\n".join([out, "-"*80])
1472        if filename is not None:
1473            if filename == "":
1474                filename = 'scantable_history.txt'
1475            import os
1476            filename = os.path.expandvars(os.path.expanduser(filename))
1477            if not os.path.isdir(filename):
1478                data = open(filename, 'w')
1479                data.write(out)
1480                data.close()
1481            else:
1482                msg = "Illegal file name '%s'." % (filename)
1483                raise IOError(msg)
1484        return page(out)
1485    #
1486    # Maths business
1487    #
1488    @asaplog_post_dec
1489    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1490        """\
1491        Return the (time) weighted average of a scan.
1492
1493        *Note*:
1494
1495            in channels only - align if necessary
1496
1497        Parameters:
1498
1499            mask:     an optional mask (only used for 'var' and 'tsys'
1500                      weighting)
1501
1502            scanav:   True averages each scan separately
1503                      False (default) averages all scans together,
1504
1505            weight:   Weighting scheme.
1506                      'none'     (mean no weight)
1507                      'var'      (1/var(spec) weighted)
1508                      'tsys'     (1/Tsys**2 weighted)
1509                      'tint'     (integration time weighted)
1510                      'tintsys'  (Tint/Tsys**2)
1511                      'median'   ( median averaging)
1512                      The default is 'tint'
1513
1514            align:    align the spectra in velocity before averaging. It takes
1515                      the time of the first spectrum as reference time.
1516
1517        Example::
1518
1519            # time average the scantable without using a mask
1520            newscan = scan.average_time()
1521
1522        """
1523        varlist = vars()
1524        weight = weight or 'TINT'
1525        mask = mask or ()
1526        scanav = (scanav and 'SCAN') or 'NONE'
1527        scan = (self, )
1528
1529        if align:
1530            scan = (self.freq_align(insitu=False), )
1531        s = None
1532        if weight.upper() == 'MEDIAN':
1533            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1534                                                     scanav))
1535        else:
1536            s = scantable(self._math._average(scan, mask, weight.upper(),
1537                          scanav))
1538        s._add_history("average_time", varlist)
1539        return s
1540
1541    @asaplog_post_dec
1542    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1543        """\
1544        Return a scan where all spectra are converted to either
1545        Jansky or Kelvin depending upon the flux units of the scan table.
1546        By default the function tries to look the values up internally.
1547        If it can't find them (or if you want to override them), you must
1548        specify either jyperk or eta (and d, which it will also try to look
1549        up if you don't set it). jyperk takes precedence if you set both.
1550
1551        Parameters:
1552
1553            jyperk:      the Jy / K conversion factor
1554
1555            eta:         the aperture efficiency
1556
1557            d:           the geometric diameter (metres)
1558
1559            insitu:      if False a new scantable is returned.
1560                         Otherwise, the scaling is done in-situ
1561                         The default is taken from .asaprc (False)
1562
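        Example (a minimal usage sketch; the Jy/K factor is illustrative)::

            # convert using an explicit Jy/K factor, returning a new scantable
            newscan = scan.convert_flux(jyperk=12.5, insitu=False)
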
1563        """
1564        if insitu is None: insitu = rcParams['insitu']
1565        self._math._setinsitu(insitu)
1566        varlist = vars()
1567        jyperk = jyperk or -1.0
1568        d = d or -1.0
1569        eta = eta or -1.0
1570        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1571        s._add_history("convert_flux", varlist)
1572        if insitu: self._assign(s)
1573        else: return s
1574
1575    @asaplog_post_dec
1576    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1577        """\
1578        Return a scan after applying a gain-elevation correction.
1579        The correction can be made via either a polynomial or a
1580        table-based interpolation (and extrapolation if necessary).
1581        You specify polynomial coefficients, an ascii table or neither.
1582        If you specify neither, then a polynomial correction will be made
1583        with built-in coefficients known for certain telescopes (an error
1584        will occur if the instrument is not known).
1585        The data and Tsys are *divided* by the scaling factors.
1586
1587        Parameters:
1588
1589            poly:        Polynomial coefficients (default None) to compute a
1590                         gain-elevation correction as a function of
1591                         elevation (in degrees).
1592
1593            filename:    The name of an ascii file holding correction factors.
1594                         The first row of the ascii file must give the column
1595                         names and these MUST include columns
1596                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1597                         by this) somewhere.
1598                         The second row must give the data type of the
1599                         column. Use 'R' for Real and 'I' for Integer.
1600                         An example file would be
1601                         (actual factors are arbitrary)::
1602
1603                         TIME ELEVATION FACTOR
1604                         R R R
1605                         0.1 0 0.8
1606                         0.2 20 0.85
1607                         0.3 40 0.9
1608                         0.4 60 0.85
1609                         0.5 80 0.8
1610                         0.6 90 0.75
1611
1612            method:      Interpolation method when correcting from a table.
1613                         Values are  "nearest", "linear" (default), "cubic"
1614                         and "spline"
1615
1616            insitu:      if False a new scantable is returned.
1617                         Otherwise, the scaling is done in-situ
1618                         The default is taken from .asaprc (False)
1619
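        Example (a minimal usage sketch; the file name is illustrative)::

            # apply a table-based correction with linear interpolation
            newscan = scan.gain_el(filename='gainel.txt', method='linear',
                                   insitu=False)
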
1620        """
1621
1622        if insitu is None: insitu = rcParams['insitu']
1623        self._math._setinsitu(insitu)
1624        varlist = vars()
1625        poly = poly or ()
1626        from os.path import expandvars
1627        filename = expandvars(filename)
1628        s = scantable(self._math._gainel(self, poly, filename, method))
1629        s._add_history("gain_el", varlist)
1630        if insitu:
1631            self._assign(s)
1632        else:
1633            return s
1634
1635    @asaplog_post_dec
1636    def freq_align(self, reftime=None, method='cubic', insitu=None):
1637        """\
1638        Return a scan where all rows have been aligned in frequency/velocity.
1639        The alignment frequency frame (e.g. LSRK) is that set by function
1640        set_freqframe.
1641
1642        Parameters:
1643
1644            reftime:     reference time to align at. By default, the time of
1645                         the first row of data is used.
1646
1647            method:      Interpolation method for regridding the spectra.
1648                         Choose from "nearest", "linear", "cubic" (default)
1649                         and "spline"
1650
1651            insitu:      if False a new scantable is returned.
1652                         Otherwise, the scaling is done in-situ
1653                         The default is taken from .asaprc (False)
1654
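        Example (a minimal usage sketch)::

            # align all rows at the time of the first row using cubic
            # interpolation and return a new scantable
            newscan = scan.freq_align(method='cubic', insitu=False)
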
1655        """
1656        if insitu is None: insitu = rcParams["insitu"]
1657        self._math._setinsitu(insitu)
1658        varlist = vars()
1659        reftime = reftime or ""
1660        s = scantable(self._math._freq_align(self, reftime, method))
1661        s._add_history("freq_align", varlist)
1662        if insitu: self._assign(s)
1663        else: return s
1664
1665    @asaplog_post_dec
1666    def opacity(self, tau=None, insitu=None):
1667        """\
1668        Apply an opacity correction. The data
1669        and Tsys are multiplied by the correction factor.
1670
1671        Parameters:
1672
1673            tau:         (list of) opacity from which the correction factor is
1674                         exp(tau*ZD)
1675                         where ZD is the zenith-distance.
1676                         If a list is provided, it has to be of length nIF,
1677                         nIF*nPol or 1 and in order of IF/POL, e.g.
1678                         [opif0pol0, opif0pol1, opif1pol0 ...]
1679                         if tau is `None` the opacities are determined from a
1680                         model.
1681
1682            insitu:      if False a new scantable is returned.
1683                         Otherwise, the scaling is done in-situ
1684                         The default is taken from .asaprc (False)
1685
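        Example (a minimal usage sketch; the opacity value is illustrative)::

            # correct all IFs/polarisations with a single zenith opacity
            newscan = scan.opacity(tau=0.09, insitu=False)
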
1686        """
1687        if insitu is None: insitu = rcParams['insitu']
1688        self._math._setinsitu(insitu)
1689        varlist = vars()
1690        if not hasattr(tau, "__len__"):
1691            tau = [tau]
1692        s = scantable(self._math._opacity(self, tau))
1693        s._add_history("opacity", varlist)
1694        if insitu: self._assign(s)
1695        else: return s
1696
1697    @asaplog_post_dec
1698    def bin(self, width=5, insitu=None):
1699        """\
1700        Return a scan where all spectra have been binned up.
1701
1702        Parameters:
1703
1704            width:       The bin width (default=5) in pixels
1705
1706            insitu:      if False a new scantable is returned.
1707                         Otherwise, the scaling is done in-situ
1708                         The default is taken from .asaprc (False)
1709
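        Example (a minimal usage sketch; the width is illustrative)::

            # bin the spectra by 10 channels and return a new scantable
            newscan = scan.bin(width=10, insitu=False)
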
1710        """
1711        if insitu is None: insitu = rcParams['insitu']
1712        self._math._setinsitu(insitu)
1713        varlist = vars()
1714        s = scantable(self._math._bin(self, width))
1715        s._add_history("bin", varlist)
1716        if insitu:
1717            self._assign(s)
1718        else:
1719            return s
1720
1721    @asaplog_post_dec
1722    def resample(self, width=5, method='cubic', insitu=None):
1723        """\
1724        Return a scan where all spectra have been resampled onto a new channel grid.
1725
1726        Parameters:
1727
1728            width:       The bin width (default=5) in pixels
1729
1730            method:      Interpolation method used when resampling.
1731                         Values are "nearest", "linear", "cubic" (default)
1732                         and "spline"
1733
1734            insitu:      if False a new scantable is returned.
1735                         Otherwise, the scaling is done in-situ
1736                         The default is taken from .asaprc (False)
1737
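        Example (a minimal usage sketch; the values are illustrative)::

            # resample onto 4-pixel bins using spline interpolation
            newscan = scan.resample(width=4, method='spline', insitu=False)
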
1738        """
1739        if insitu is None: insitu = rcParams['insitu']
1740        self._math._setinsitu(insitu)
1741        varlist = vars()
1742        s = scantable(self._math._resample(self, method, width))
1743        s._add_history("resample", varlist)
1744        if insitu: self._assign(s)
1745        else: return s
1746
1747    @asaplog_post_dec
1748    def average_pol(self, mask=None, weight='none'):
1749        """\
1750        Average the Polarisations together.
1751
1752        Parameters:
1753
1754            mask:        An optional mask defining the region where the
1755                         averaging will be applied. The output will have all
1756                         specified points masked.
1757
1758            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1759                         weighted), or 'tsys' (1/Tsys**2 weighted)
1760
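        Example (a minimal usage sketch)::

            # average the polarisations with 1/Tsys**2 weighting
            newscan = scan.average_pol(weight='tsys')
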
1761        """
1762        varlist = vars()
1763        mask = mask or ()
1764        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1765        s._add_history("average_pol", varlist)
1766        return s
1767
1768    @asaplog_post_dec
1769    def average_beam(self, mask=None, weight='none'):
1770        """\
1771        Average the Beams together.
1772
1773        Parameters:
1774            mask:        An optional mask defining the region where the
1775                         averaging will be applied. The output will have all
1776                         specified points masked.
1777
1778            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1779                         weighted), or 'tsys' (1/Tsys**2 weighted)
1780
1781        """
1782        varlist = vars()
1783        mask = mask or ()
1784        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1785        s._add_history("average_beam", varlist)
1786        return s
1787
1788    def parallactify(self, pflag):
1789        """\
1790        Set a flag to indicate whether this data should be treated as having
1791        been 'parallactified' (total phase == 0.0)
1792
1793        Parameters:
1794
1795            pflag:  Bool indicating whether to turn this on (True) or
1796                    off (False)
1797
1798        """
1799        varlist = vars()
1800        self._parallactify(pflag)
1801        self._add_history("parallactify", varlist)
1802
1803    @asaplog_post_dec
1804    def convert_pol(self, poltype=None):
1805        """\
1806        Convert the data to a different polarisation type.
1807        Note that you will need cross-polarisation terms for most conversions.
1808
1809        Parameters:
1810
1811            poltype:    The new polarisation type. Valid types are:
1812                        "linear", "circular", "stokes" and "linpol"
1813
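        Example (a minimal usage sketch)::

            # convert linearly polarised data to Stokes parameters
            stokescan = scan.convert_pol('stokes')
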
1814        """
1815        varlist = vars()
1816        s = scantable(self._math._convertpol(self, poltype))
1817        s._add_history("convert_pol", varlist)
1818        return s
1819
1820    @asaplog_post_dec
1821    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1822        """\
1823        Smooth the spectrum by the specified kernel (conserving flux).
1824
1825        Parameters:
1826
1827            kernel:     The type of smoothing kernel. Select from
1828                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1829                        or 'poly'
1830
1831            width:      The width of the kernel in pixels. For 'hanning' this
1832                        is ignored, otherwise it defaults to 5 pixels.
1833                        For 'gaussian' it is the Full Width Half
1834                        Maximum. For 'boxcar' it is the full width.
1835                        For 'rmedian' and 'poly' it is the half width.
1836
1837            order:      Optional parameter for 'poly' kernel (default is 2), to
1838                        specify the order of the polynomial. Ignored by all other
1839                        kernels.
1840
1841            plot:       plot the original and the smoothed spectra.
1842                        In this mode each individual fit has to be approved
1843                        by typing 'y' or 'n'
1844
1845            insitu:     if False a new scantable is returned.
1846                        Otherwise, the scaling is done in-situ
1847                        The default is taken from .asaprc (False)
1848
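        Example (a minimal usage sketch; the width is illustrative)::

            # gaussian smoothing with a FWHM of 7 pixels
            newscan = scan.smooth(kernel='gaussian', width=7, insitu=False)
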
1849        """
1850        if insitu is None: insitu = rcParams['insitu']
1851        self._math._setinsitu(insitu)
1852        varlist = vars()
1853
1854        if plot: orgscan = self.copy()
1855
1856        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1857        s._add_history("smooth", varlist)
1858
1859        if plot:
1860            if rcParams['plotter.gui']:
1861                from asap.asaplotgui import asaplotgui as asaplot
1862            else:
1863                from asap.asaplot import asaplot
1864            self._p=asaplot()
1865            self._p.set_panels()
1866            ylab=s._get_ordinate_label()
1867            #self._p.palette(0,["#777777","red"])
1868            for r in xrange(s.nrow()):
1869                xsm=s._getabcissa(r)
1870                ysm=s._getspectrum(r)
1871                xorg=orgscan._getabcissa(r)
1872                yorg=orgscan._getspectrum(r)
1873                self._p.clear()
1874                self._p.hold()
1875                self._p.set_axes('ylabel',ylab)
1876                self._p.set_axes('xlabel',s._getabcissalabel(r))
1877                self._p.set_axes('title',s._getsourcename(r))
1878                self._p.set_line(label='Original',color="#777777")
1879                self._p.plot(xorg,yorg)
1880                self._p.set_line(label='Smoothed',color="red")
1881                self._p.plot(xsm,ysm)
1882                ### Ugly part for legend
1883                for i in [0,1]:
1884                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1885                self._p.release()
1886                ### Ugly part for legend
1887                self._p.subplots[0]['lines']=[]
1888                res = raw_input("Accept smoothing ([y]/n): ")
1889                if res.upper() == 'N':
1890                    s._setspectrum(yorg, r)
1891            self._p.unmap()
1892            self._p = None
1893            del orgscan
1894
1895        if insitu: self._assign(s)
1896        else: return s
1897
1898    @asaplog_post_dec
1899    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1900        """\
1901        Return a scan which has been baselined (all rows) by a polynomial.
1902       
1903        Parameters:
1904
1905            mask:       an optional mask
1906
1907            order:      the order of the polynomial (default is 0)
1908
1909            plot:       plot the fit and the residual. In this mode each
1910                        individual fit has to be approved by typing 'y'
1911                        or 'n'
1912
1913            uselin:     use linear polynomial fit
1914
1915            insitu:     if False a new scantable is returned.
1916                        Otherwise, the scaling is done in-situ
1917                        The default is taken from .asaprc (False)
1918
1919            rows:       row numbers of spectra to be processed.
1920                        (default is None: for all rows)
1921       
1922        Example:
1923            # return a scan baselined by a third order polynomial,
1924            # not using a mask
1925            bscan = scan.poly_baseline(order=3)
1926
1927        """
1928        if insitu is None: insitu = rcParams['insitu']
1929        if not insitu:
1930            workscan = self.copy()
1931        else:
1932            workscan = self
1933        varlist = vars()
1934        if mask is None:
1935            mask = [True for i in xrange(self.nchan())]
1936
1937        try:
1938            f = fitter()
1939            if uselin:
1940                f.set_function(lpoly=order)
1941            else:
1942                f.set_function(poly=order)
1943
1944            if rows == None:
1945                rows = xrange(workscan.nrow())
1946            elif isinstance(rows, int):
1947                rows = [ rows ]
1948           
1949            if len(rows) > 0:
1950                self.blpars = []
1951                self.masklists = []
1952                self.actualmask = []
1953           
1954            for r in rows:
1955                f.x = workscan._getabcissa(r)
1956                f.y = workscan._getspectrum(r)
1957                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1958                f.data = None
1959                f.fit()
1960                if plot:
1961                    f.plot(residual=True)
1962                    x = raw_input("Accept fit ( [y]/n ): ")
1963                    if x.upper() == 'N':
1964                        self.blpars.append(None)
1965                        self.masklists.append(None)
1966                        self.actualmask.append(None)
1967                        continue
1968                workscan._setspectrum(f.fitter.getresidual(), r)
1969                self.blpars.append(f.get_parameters())
1970                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1971                self.actualmask.append(f.mask)
1972
1973            if plot:
1974                f._p.unmap()
1975                f._p = None
1976            workscan._add_history("poly_baseline", varlist)
1977            if insitu:
1978                self._assign(workscan)
1979            else:
1980                return workscan
1981        except RuntimeError:
1982            msg = "The fit failed, possibly because it didn't converge."
1983            raise RuntimeError(msg)
1984
1985    @asaplog_post_dec
1986    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1987        """\
1988        Return a scan which has been baselined (all rows) by a polynomial.
1989        Parameters:
1990            mask:       an optional mask
1991            order:      the order of the polynomial (default is 0)
1992            plot:       plot the fit and the residual. In this mode each
1993                        individual fit has to be approved by typing 'y'
1994                        or 'n'. Ignored if batch = True.
1995            batch:      if True a faster algorithm is used and logs
1996                        including the fit results are not output
1997                        (default is False)
1998            insitu:     if False a new scantable is returned.
1999                        Otherwise, the scaling is done in-situ
2000                        The default is taken from .asaprc (False)
2001            rows:       row numbers of spectra to be baselined.
2002                        (default is None: for all rows)
2003        Example:
2004            # return a scan baselined by a third order polynomial,
2005            # not using a mask
2006            bscan = scan.poly_baseline(order=3)
2007        """
2008       
2009        varlist = vars()
2010       
2011        if insitu is None: insitu = rcParams["insitu"]
2012        if insitu:
2013            workscan = self
2014        else:
2015            workscan = self.copy()
2016
2017        nchan = workscan.nchan()
2018       
2019        if mask is None:
2020            mask = [True for i in xrange(nchan)]
2021
2022        try:
2023            if rows == None:
2024                rows = xrange(workscan.nrow())
2025            elif isinstance(rows, int):
2026                rows = [ rows ]
2027           
2028            if len(rows) > 0:
2029                workscan.blpars = []
2030                workscan.masklists = []
2031                workscan.actualmask = []
2032
2033            if batch:
2034                workscan._poly_baseline_batch(mask, order)
2035            elif plot:
2036                f = fitter()
2037                f.set_function(lpoly=order)
2038                for r in rows:
2039                    f.x = workscan._getabcissa(r)
2040                    f.y = workscan._getspectrum(r)
2041                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2042                    f.data = None
2043                    f.fit()
2044                   
2045                    f.plot(residual=True)
2046                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2047                    if accept_fit.upper() == "N":
2048                        self.blpars.append(None)
2049                        self.masklists.append(None)
2050                        self.actualmask.append(None)
2051                        continue
2052                    workscan._setspectrum(f.fitter.getresidual(), r)
2053                    workscan.blpars.append(f.get_parameters())
2054                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2055                    workscan.actualmask.append(f.mask)
2056                   
2057                f._p.unmap()
2058                f._p = None
2059            else:
2060                for r in rows:
2061                    fitparams = workscan._poly_baseline(mask, order, r)
2062                    params = fitparams.getparameters()
2063                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2064                    errors = fitparams.geterrors()
2065                    fmask = mask_and(mask, workscan._getmask(r))
2066
2067                    workscan.blpars.append({"params":params,
2068                                            "fixed": fitparams.getfixedparameters(),
2069                                            "formatted":fmtd, "errors":errors})
2070                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2071                    workscan.actualmask.append(fmask)
2072                   
2073                    asaplog.push(fmtd)
2074           
2075            workscan._add_history("poly_baseline", varlist)
2076           
2077            if insitu:
2078                self._assign(workscan)
2079            else:
2080                return workscan
2081           
2082        except RuntimeError, e:
2083            msg = "The fit failed, possibly because it didn't converge."
2084            if rcParams["verbose"]:
2085                asaplog.push(str(e))
2086                asaplog.push(str(msg))
2087                return
2088            else:
2089                raise RuntimeError(str(e)+'\n'+msg)
2090
2091
2092    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2093                           threshold=3, chan_avg_limit=1, plot=False,
2094                           insitu=None, rows=None):
2095        """\
2096        Return a scan which has been baselined (all rows) by a polynomial.
2097        Spectral lines are detected first using linefinder and masked out
2098        to avoid them affecting the baseline solution.
2099
2100        Parameters:
2101
2102            mask:       an optional mask retrieved from scantable
2103
2104            edge:       an optional number of channels to drop at the edge of
2105                        the spectrum. If only one value is
2106                        specified, the same number will be dropped from
2107                        both sides of the spectrum. Default is to keep
2108                        all channels. Nested tuples represent individual
2109                        edge selection for different IFs (the number of
2110                        spectral channels can differ between IFs)
2111
2112            order:      the order of the polynomial (default is 0)
2113
2114            threshold:  the threshold used by line finder. It is better to
2115                        keep it large as only strong lines affect the
2116                        baseline solution.
2117
2118            chan_avg_limit:
2119                        a maximum number of consecutive spectral channels to
2120                        average during the search for weak and broad lines.
2121                        The default is no averaging (and no search for weak
2122                        lines). If such lines can affect the fitted baseline
2123                        (e.g. a high order polynomial is fitted), increase this
2124                        parameter (usually values up to 8 are reasonable). Most
2125                        users of this method should find the default value
2126                        sufficient.
2127
2128            plot:       plot the fit and the residual. In this mode each
2129                        individual fit has to be approved by typing 'y'
2130                        or 'n'
2131
2132            insitu:     if False a new scantable is returned.
2133                        Otherwise, the scaling is done in-situ
2134                        The default is taken from .asaprc (False)
2135            rows:       row numbers of spectra to be processed.
2136                        (default is None: for all rows)
2137
2138
2139        Example::
2140
2141            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2142
2143        """
2144        if insitu is None: insitu = rcParams['insitu']
2145        varlist = vars()
2146        from asap.asaplinefind import linefinder
2147        from asap import _is_sequence_or_number as _is_valid
2148
2149        # check whether edge is set up for each IF individually
2150        individualedge = False;
2151        if len(edge) > 1:
2152            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2153                individualedge = True;
2154
2155        if not _is_valid(edge, int) and not individualedge:
2156            raise ValueError, "Parameter 'edge' has to be an integer or a \
2157            pair of integers specified as a tuple. Nested tuples are allowed \
2158            to make individual selection for different IFs."
2159
2160        curedge = (0, 0)
2161        if individualedge:
2162            for edgepar in edge:
2163                if not _is_valid(edgepar, int):
2164                    raise ValueError, "Each element of the 'edge' tuple has \
2165                                       to be a pair of integers or an integer."
2166        else:
2167            curedge = edge;
2168
2169        if not insitu:
2170            workscan = self.copy()
2171        else:
2172            workscan = self
2173
2174        # setup fitter
2175        f = fitter()
2176        f.set_function(lpoly=order)
2177
2178        # setup line finder
2179        fl = linefinder()
2180        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2181
2182        fl.set_scan(workscan)
2183
2184        if mask is None:
2185            mask = _n_bools(workscan.nchan(), True)
2186       
2187        if rows is None:
2188            rows = xrange(workscan.nrow())
2189        elif isinstance(rows, int):
2190            rows = [ rows ]
2191       
2192        # Save parameters of baseline fits & masklists as a class attribute.
2193        # NOTICE: It does not reflect changes in scantable!
2194        if len(rows) > 0:
2195            self.blpars=[]
2196            self.masklists=[]
2197            self.actualmask=[]
2198        asaplog.push("Processing:")
2199        for r in rows:
2200            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2201                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2202                 workscan.getpol(r), workscan.getcycle(r))
2203            asaplog.push(msg, False)
2204
2205            # figure out edge parameter
2206            if individualedge:
2207                if len(edge) <= workscan.getif(r):
2208                    raise RuntimeError, "Number of edge elements appears to " \
2209                                        "be less than the number of IFs"
2210                curedge = edge[workscan.getif(r)]
2211
2212            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2213
2214            # setup line finder
2215            fl.find_lines(r, actualmask, curedge)
2216           
2217            f.x = workscan._getabcissa(r)
2218            f.y = workscan._getspectrum(r)
2219            f.mask = fl.get_mask()
2220            f.data = None
2221            f.fit()
2222
2223            # Show mask list
2224            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2225            msg = "mask range: "+str(masklist)
2226            asaplog.push(msg, False)
2227
2228            if plot:
2229                f.plot(residual=True)
2230                x = raw_input("Accept fit ( [y]/n ): ")
2231                if x.upper() == 'N':
2232                    self.blpars.append(None)
2233                    self.masklists.append(None)
2234                    self.actualmask.append(None)
2235                    continue
2236
2237            workscan._setspectrum(f.fitter.getresidual(), r)
2238            self.blpars.append(f.get_parameters())
2239            self.masklists.append(masklist)
2240            self.actualmask.append(f.mask)
2241        if plot:
2242            f._p.unmap()
2243            f._p = None
2244        workscan._add_history("auto_poly_baseline", varlist)
2245        if insitu:
2246            self._assign(workscan)
2247        else:
2248            return workscan
2249
2250    @asaplog_post_dec
2251    def rotate_linpolphase(self, angle):
2252        """\
2253        Rotate the phase of the complex polarization O=Q+iU correlation.
2254        This is always done in situ in the raw data.  So if you call this
2255        function more than once then each call rotates the phase further.
2256
2257        Parameters:
2258
2259            angle:   The angle (degrees) to rotate (add) by.
2260
2261        Example::
2262
2263            scan.rotate_linpolphase(2.3)
2264
2265        """
2266        varlist = vars()
2267        self._math._rotate_linpolphase(self, angle)
2268        self._add_history("rotate_linpolphase", varlist)
2269        return
2270
2271    @asaplog_post_dec
2272    def rotate_xyphase(self, angle):
2273        """\
2274        Rotate the phase of the XY correlation.  This is always done in situ
2275        in the data.  So if you call this function more than once
2276        then each call rotates the phase further.
2277
2278        Parameters:
2279
2280            angle:   The angle (degrees) to rotate (add) by.
2281
2282        Example::
2283
2284            scan.rotate_xyphase(2.3)
2285
2286        """
2287        varlist = vars()
2288        self._math._rotate_xyphase(self, angle)
2289        self._add_history("rotate_xyphase", varlist)
2290        return
2291
2292    @asaplog_post_dec
2293    def swap_linears(self):
2294        """\
2295        Swap the linear polarisations XX and YY, or more generally the first
2296        two polarisations, as this also works for circulars.
2297        """
2298        varlist = vars()
2299        self._math._swap_linears(self)
2300        self._add_history("swap_linears", varlist)
2301        return
2302
2303    @asaplog_post_dec
2304    def invert_phase(self):
2305        """\
2306        Invert the phase of the complex polarisation
2307        """
2308        varlist = vars()
2309        self._math._invert_phase(self)
2310        self._add_history("invert_phase", varlist)
2311        return
2312
2313    @asaplog_post_dec
2314    def add(self, offset, insitu=None):
2315        """\
2316        Return a scan where all spectra have the offset added
2317
2318        Parameters:
2319
2320            offset:      the offset
2321
2322            insitu:      if False a new scantable is returned.
2323                         Otherwise, the scaling is done in-situ
2324                         The default is taken from .asaprc (False)
2325
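        Example (a minimal usage sketch; the offset is illustrative)::

            # add a constant offset of 0.5 to all spectra
            newscan = scan.add(0.5, insitu=False)
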
2326        """
2327        if insitu is None: insitu = rcParams['insitu']
2328        self._math._setinsitu(insitu)
2329        varlist = vars()
2330        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2331        s._add_history("add", varlist)
2332        if insitu:
2333            self._assign(s)
2334        else:
2335            return s
2336
2337    @asaplog_post_dec
2338    def scale(self, factor, tsys=True, insitu=None):
2339        """\
2340
2341        Return a scan where all spectra are scaled by the given 'factor'
2342
2343        Parameters:
2344
2345            factor:      the scaling factor (float or 1D float list)
2346
2347            insitu:      if False a new scantable is returned.
2348                         Otherwise, the scaling is done in-situ
2349                         The default is taken from .asaprc (False)
2350
2351            tsys:        if True (default) then apply the operation to Tsys
2352                         as well as the data
2353
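        Example (a minimal usage sketch; the factor is illustrative)::

            # scale all spectra (and Tsys) by 1.3
            newscan = scan.scale(1.3, tsys=True, insitu=False)
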
2354        """
2355        if insitu is None: insitu = rcParams['insitu']
2356        self._math._setinsitu(insitu)
2357        varlist = vars()
2358        s = None
2359        import numpy
2360        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2361            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2362                from asapmath import _array2dOp
2363                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2364            else:
2365                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2366        else:
2367            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2368        s._add_history("scale", varlist)
2369        if insitu:
2370            self._assign(s)
2371        else:
2372            return s
2373
2374    def set_sourcetype(self, match, matchtype="pattern",
2375                       sourcetype="reference"):
2376        """\
2377        Set the type of the source to be a source or reference scan
2378        using the provided pattern.
2379
2380        Parameters:
2381
2382            match:          a Unix style pattern, regular expression or selector
2383
2384            matchtype:      'pattern' (default) UNIX style pattern or
2385                            'regex' regular expression
2386
2387            sourcetype:     the type of the source to use (source/reference)
2388
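        Example (a minimal usage sketch; the pattern is illustrative)::

            # mark all scans whose source name ends in '_R' as reference scans
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
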
2389        """
2390        varlist = vars()
2391        basesel = self.get_selection()
2392        stype = -1
2393        if sourcetype.lower().startswith("r"):
2394            stype = 1
2395        elif sourcetype.lower().startswith("s"):
2396            stype = 0
2397        else:
2398            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2399        if matchtype.lower().startswith("p"):
2400            matchtype = "pattern"
2401        elif matchtype.lower().startswith("r"):
2402            matchtype = "regex"
2403        else:
2404            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2405        sel = selector()
2406        if isinstance(match, selector):
2407            sel = match
2408        else:
2409            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2410        self.set_selection(basesel+sel)
2411        self._setsourcetype(stype)
2412        self.set_selection(basesel)
2413        self._add_history("set_sourcetype", varlist)
2414
2415    @asaplog_post_dec
2416    @preserve_selection
2417    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2418        """\
2419        This function allows you to build quotients automatically.
2420        It assumes the observation has the same number of
2421        "on" and "off" scans.
2422
2423        Parameters:
2424
2425            preserve:       you can preserve (default) the continuum or
2426                            remove it.  The equations used are
2427
2428                            preserve: Output = Toff * (on/off) - Toff
2429
2430                            remove:   Output = Toff * (on/off) - Ton
2431
2432            mode:           the on/off detection mode
2433                            'paired' (default)
2434                            identifies 'off' scans by the
2435                            trailing '_R' (Mopra/Parkes) or
2436                            '_e'/'_w' (Tid) and matches
2437                            on/off pairs from the observing pattern
2438                            'time'
2439                            finds the closest off in time
2440
2441        .. todo:: verify argument is not implemented
2442
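        Example (a minimal usage sketch)::

            # build quotients from paired on/off scans, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
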
2443        """
2444        varlist = vars()
2445        modes = ["time", "paired"]
2446        if not mode in modes:
2447            msg = "please provide valid mode. Valid modes are %s" % (modes)
2448            raise ValueError(msg)
2449        s = None
2450        if mode.lower() == "paired":
2451            sel = self.get_selection()
2452            sel.set_query("SRCTYPE==psoff")
2453            self.set_selection(sel)
2454            offs = self.copy()
2455            sel.set_query("SRCTYPE==pson")
2456            self.set_selection(sel)
2457            ons = self.copy()
2458            s = scantable(self._math._quotient(ons, offs, preserve))
2459        elif mode.lower() == "time":
2460            s = scantable(self._math._auto_quotient(self, mode, preserve))
2461        s._add_history("auto_quotient", varlist)
2462        return s
2463
2464    @asaplog_post_dec
2465    def mx_quotient(self, mask = None, weight='median', preserve=True):
2466        """\
2467        Form a quotient using "off" beams when observing in "MX" mode.
2468
2469        Parameters:
2470
2471            mask:           an optional mask to be used when weight == 'stddev'
2472
2473            weight:         How to average the off beams.  Default is 'median'.
2474
2475            preserve:       you can preserve (default) the continuum or
2476                            remove it.  The equations used are:
2477
2478                                preserve: Output = Toff * (on/off) - Toff
2479
2480                                remove:   Output = Toff * (on/off) - Ton
2481
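        Example (a minimal usage sketch)::

            # form the MX quotient, median-averaging the off beams
            q = scan.mx_quotient(weight='median', preserve=True)
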
2482        """
2483        mask = mask or ()
2484        varlist = vars()
2485        on = scantable(self._math._mx_extract(self, 'on'))
2486        preoff = scantable(self._math._mx_extract(self, 'off'))
2487        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2488        from asapmath  import quotient
2489        q = quotient(on, off, preserve)
2490        q._add_history("mx_quotient", varlist)
2491        return q
2492
2493    @asaplog_post_dec
2494    def freq_switch(self, insitu=None):
2495        """\
2496        Apply frequency switching to the data.
2497
2498        Parameters:
2499
2500            insitu:      if False a new scantable is returned.
2501                         Otherwise, the switching is done in-situ
2502                         The default is taken from .asaprc (False)
2503
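        Example (a minimal usage sketch)::

            # apply frequency switching and return a new scantable
            fscan = scan.freq_switch(insitu=False)
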
2504        """
2505        if insitu is None: insitu = rcParams['insitu']
2506        self._math._setinsitu(insitu)
2507        varlist = vars()
2508        s = scantable(self._math._freqswitch(self))
2509        s._add_history("freq_switch", varlist)
2510        if insitu:
2511            self._assign(s)
2512        else:
2513            return s
2514
2515    @asaplog_post_dec
2516    def recalc_azel(self):
2517        """Recalculate the azimuth and elevation for each position."""
2518        varlist = vars()
2519        self._recalcazel()
2520        self._add_history("recalc_azel", varlist)
2521        return
2522
2523    @asaplog_post_dec
2524    def __add__(self, other):
2525        varlist = vars()
2526        s = None
2527        if isinstance(other, scantable):
2528            s = scantable(self._math._binaryop(self, other, "ADD"))
2529        elif isinstance(other, float):
2530            s = scantable(self._math._unaryop(self, other, "ADD", False))
2531        else:
2532            raise TypeError("Other input is not a scantable or float value")
2533        s._add_history("operator +", varlist)
2534        return s
2535
2536    @asaplog_post_dec
2537    def __sub__(self, other):
2538        """
2539        implicit on all axes and on Tsys
2540        """
2541        varlist = vars()
2542        s = None
2543        if isinstance(other, scantable):
2544            s = scantable(self._math._binaryop(self, other, "SUB"))
2545        elif isinstance(other, float):
2546            s = scantable(self._math._unaryop(self, other, "SUB", False))
2547        else:
2548            raise TypeError("Other input is not a scantable or float value")
2549        s._add_history("operator -", varlist)
2550        return s
2551
2552    @asaplog_post_dec
2553    def __mul__(self, other):
2554        """
2555        implicit on all axes and on Tsys
2556        """
2557        varlist = vars()
2558        s = None
2559        if isinstance(other, scantable):
2560            s = scantable(self._math._binaryop(self, other, "MUL"))
2561        elif isinstance(other, float):
2562            s = scantable(self._math._unaryop(self, other, "MUL", False))
2563        else:
2564            raise TypeError("Other input is not a scantable or float value")
2565        s._add_history("operator *", varlist)
2566        return s
2567
2568
2569    @asaplog_post_dec
2570    def __div__(self, other):
2571        """
2572        implicit on all axes and on Tsys
2573        """
2574        varlist = vars()
2575        s = None
2576        if isinstance(other, scantable):
2577            s = scantable(self._math._binaryop(self, other, "DIV"))
2578        elif isinstance(other, float):
2579            if other == 0.0:
2580                raise ZeroDivisionError("Dividing by zero is not recommended")
2581            s = scantable(self._math._unaryop(self, other, "DIV", False))
2582        else:
2583            raise TypeError("Other input is not a scantable or float value")
2584        s._add_history("operator /", varlist)
2585        return s
2586
2587    @asaplog_post_dec
2588    def get_fit(self, row=0):
2589        """\
2590        Print or return the stored fits for a row in the scantable
2591
2592        Parameters:
2593
2594            row:    the row which the fit has been applied to.
2595
2596        """
2597        if row >= self.nrow():
2598            return
2599        from asap.asapfit import asapfit
2600        fit = asapfit(self._getfit(row))
2601        asaplog.push( '%s' %(fit) )
2602        return fit.as_dict()
2603
2604    def flag_nans(self):
2605        """\
2606        Utility function to flag NaN values in the scantable.
2607        """
2608        import numpy
2609        basesel = self.get_selection()
2610        for i in range(self.nrow()):
2611            sel = self.get_row_selector(i)
2612            self.set_selection(basesel+sel)
2613            nans = numpy.isnan(self._getspectrum(0))
2614            if numpy.any(nans):
2615                bnans = [ bool(v) for v in nans]
2616                self.flag(bnans)
2617        self.set_selection(basesel)
2618
2619    def get_row_selector(self, rowno):
2620        #return selector(beams=self.getbeam(rowno),
2621        #                ifs=self.getif(rowno),
2622        #                pols=self.getpol(rowno),
2623        #                scans=self.getscan(rowno),
2624        #                cycles=self.getcycle(rowno))
2625        return selector(rows=[rowno])
2626
2627    def _add_history(self, funcname, parameters):
2628        if not rcParams['scantable.history']:
2629            return
2630        # create date
2631        sep = "##"
2632        from datetime import datetime
2633        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2634        hist = dstr+sep
2635        hist += funcname+sep#cdate+sep
2636        if parameters.has_key('self'): del parameters['self']
2637        for k, v in parameters.iteritems():
2638            if type(v) is dict:
2639                for k2, v2 in v.iteritems():
2640                    hist += k2
2641                    hist += "="
2642                    if isinstance(v2, scantable):
2643                        hist += 'scantable'
2644                    elif k2 == 'mask':
2645                        if isinstance(v2, list) or isinstance(v2, tuple):
2646                            hist += str(self._zip_mask(v2))
2647                        else:
2648                            hist += str(v2)
2649                    else:
2650                        hist += str(v2)
2651            else:
2652                hist += k
2653                hist += "="
2654                if isinstance(v, scantable):
2655                    hist += 'scantable'
2656                elif k == 'mask':
2657                    if isinstance(v, list) or isinstance(v, tuple):
2658                        hist += str(self._zip_mask(v))
2659                    else:
2660                        hist += str(v)
2661                else:
2662                    hist += str(v)
2663            hist += sep
2664        hist = hist[:-2] # remove trailing '##'
2665        self._addhistory(hist)
2666
2667
2668    def _zip_mask(self, mask):
2669        mask = list(mask)
2670        i = 0
2671        segments = []
2672        while mask[i:].count(1):
2673            i += mask[i:].index(1)
2674            if mask[i:].count(0):
2675                j = i + mask[i:].index(0)
2676            else:
2677                j = len(mask)
2678            segments.append([i, j])
2679            i = j
2680        return segments
2681
2682    def _get_ordinate_label(self):
2683        fu = "("+self.get_fluxunit()+")"
2684        import re
2685        lbl = "Intensity"
2686        if re.match(".K.", fu):
2687            lbl = "Brightness Temperature "+ fu
2688        elif re.match(".Jy.", fu):
2689            lbl = "Flux density "+ fu
2690        return lbl
2691
2692    def _check_ifs(self):
2693        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
2694        nchans = [self.nchan(i) for i in self.getifnos()]
2695        nchans = filter(lambda t: t > 0, nchans)
2696        return (sum(nchans)/len(nchans) == nchans[0])
2697
2698    @asaplog_post_dec
2699    #def _fill(self, names, unit, average, getpt, antenna):
2700    def _fill(self, names, unit, average, opts={}):
2701        first = True
2702        fullnames = []
2703        for name in names:
2704            name = os.path.expandvars(name)
2705            name = os.path.expanduser(name)
2706            if not os.path.exists(name):
2707                msg = "File '%s' does not exist" % (name)
2708                raise IOError(msg)
2709            fullnames.append(name)
2710        if average:
2711            asaplog.push('Auto averaging integrations')
2712        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2713        for name in fullnames:
2714            tbl = Scantable(stype)
2715            if is_ms( name ):
2716                r = msfiller( tbl )
2717            else:
2718                r = filler( tbl )
2719                rx = rcParams['scantable.reference']
2720                r.setreferenceexpr(rx)
2721            #r = filler(tbl)
2722            #rx = rcParams['scantable.reference']
2723            #r.setreferenceexpr(rx)
2724            msg = "Importing %s..." % (name)
2725            asaplog.push(msg, False)
2726            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2727            r.open(name, opts)# antenna, -1, -1, getpt)
2728            r.fill()
2729            if average:
2730                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2731            if not first:
2732                tbl = self._math._merge([self, tbl])
2733            Scantable.__init__(self, tbl)
2734            r.close()
2735            del r, tbl
2736            first = False
2737            #flush log
2738        asaplog.post()
2739        if unit is not None:
2740            self.set_fluxunit(unit)
2741        if not is_casapy():
2742            self.set_freqframe(rcParams['scantable.freqframe'])
2743
2744    def __getitem__(self, key):
2745        if key < 0:
2746            key += self.nrow()
2747        if key >= self.nrow():
2748            raise IndexError("Row index out of range.")
2749        return self._getspectrum(key)
2750
2751    def __setitem__(self, key, value):
2752        if key < 0:
2753            key += self.nrow()
2754        if key >= self.nrow():
2755            raise IndexError("Row index out of range.")
2756        if not hasattr(value, "__len__") or \
2757                len(value) > self.nchan(self.getif(key)):
2758            raise ValueError("Spectrum length doesn't match.")
2759        return self._setspectrum(value, key)
2760
2761    def __len__(self):
2762        return self.nrow()
2763
2764    def __iter__(self):
2765        for i in range(len(self)):
2766            yield self[i]