source: trunk/python/scantable.py @ 1947

Last change on this file since 1947 was 1947, checked in by Kana Sugimoto, 13 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed:

+ An optional parameter 'prec' (unsigned int) is added to
scantable.get_time, python_Scantable::_gettime, ScantableWrapper::getTime and Scantable::getTime.
+ Scantable::formatTime also accepts 'prec' as a parameter.
+ scantable._get_column accepts additional arguments which are passed on to the callback function.

Test Programs:

Put in Release Notes: No

Module(s): scantable

Description:

Add a parameter 'prec' to scantable.get_time which specifies the precision of the returned time stamps.
The default value is prec=0.
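
An illustrative usage sketch (not part of the changeset; it assumes an existing scantable object 'scan'):

    # default: time stamps returned as strings with seconds precision (prec=0)
    times = scan.get_time()
    # request sub-second digits; 'prec' follows the MVTime precision convention
    times_hires = scan.get_time(prec=8)
    # return a datetime object for a single row instead of a string
    t0 = scan.get_time(row=0, asdatetime=True)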

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 93.0 KB
1"""This module defines the scantable class."""
2
3import os
4try:
5    from functools import wraps as wraps_dec
6except ImportError:
7    from asap.compatibility import wraps as wraps_dec
8
9from asap.env import is_casapy
10from asap._asap import Scantable
11from asap._asap import filler
12from asap.parameters import rcParams
13from asap.logging import asaplog, asaplog_post_dec
14from asap.selector import selector
15from asap.linecatalog import linecatalog
16from asap.coordinate import coordinate
17from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
18from asap.asapfitter import fitter
19
20
21def preserve_selection(func):
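    """Decorator that saves the scantable's selection before calling the
    wrapped method and restores it afterwards."""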
22    @wraps_dec(func)
23    def wrap(obj, *args, **kw):
24        basesel = obj.get_selection()
25        try:
26            val = func(obj, *args, **kw)
27        finally:
28            obj.set_selection(basesel)
29        return val
30    return wrap
31
32def is_scantable(filename):
33    """Is the given file a scantable?
34
35    Parameters:
36
37        filename: the name of the file/directory to test
38
39    """
40    if ( os.path.isdir(filename)
41         and os.path.exists(filename+'/table.info')
42         and os.path.exists(filename+'/table.dat') ):
43        f=open(filename+'/table.info')
44        l=f.readline()
45        f.close()
46        #if ( l.find('Scantable') != -1 ):
47        if ( l.find('Measurement Set') == -1 ):
48            return True
49        else:
50            return False
51    else:
52        return False
53##     return (os.path.isdir(filename)
54##             and not os.path.exists(filename+'/table.f1')
55##             and os.path.exists(filename+'/table.info'))
56
57def is_ms(filename):
58    """Is the given file a MeasurementSet?
59
60    Parameters:
61
62        filename: the name of the file/directory to test
63
64    """
65    if ( os.path.isdir(filename)
66         and os.path.exists(filename+'/table.info')
67         and os.path.exists(filename+'/table.dat') ):
68        f=open(filename+'/table.info')
69        l=f.readline()
70        f.close()
71        if ( l.find('Measurement Set') != -1 ):
72            return True
73        else:
74            return False
75    else:
76        return False
77   
78class scantable(Scantable):
79    """\
80        The ASAP container for scans (single-dish data).
81    """
82
83    @asaplog_post_dec
84    #def __init__(self, filename, average=None, unit=None, getpt=None,
85    #             antenna=None, parallactify=None):
86    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
87        """\
88        Create a scantable from a saved one or make a reference
89
90        Parameters:
91
92            filename:     the name of an asap table on disk
93                          or
94                          the name of a rpfits/sdfits/ms file
95                          (integrations within scans are auto averaged
96                          and the whole file is read) or
97                          [advanced] a reference to an existing scantable
98
99            average:      average all integrations within a scan on read.
100                          The default (True) is taken from .asaprc.
101
102            unit:         brightness unit; must be consistent with K or Jy.
103                          Over-rides the default selected by the filler
104                          (input rpfits/sdfits/ms) or replaces the value
105                          in existing scantables
106
107            getpt:        for MeasurementSet input data only:
108                          If True, all pointing data are filled.
109                          The default is False, which makes loading the
110                          MS data faster in some cases.
111
112            antenna:      for MeasurementSet input data only:
113                          Antenna selection: integer (id) or string (name or id).
114
115            parallactify: Indicate that the data have been parallactified.
116                          The default is taken from the rc file.
117
118        """
119        if average is None:
120            average = rcParams['scantable.autoaverage']
121        #if getpt is None:
122        #    getpt = True
123        #if antenna is not None:
124        #    asaplog.push("Antenna selection currently unsupported."
125        #                 "Using ''")
126        #    asaplog.post('WARN')
127        #if antenna is None:
128        #    antenna = ''
129        #elif type(antenna) == int:
130        #    antenna = '%s' % antenna
131        #elif type(antenna) == list:
132        #    tmpstr = ''
133        #    for i in range( len(antenna) ):
134        #        if type(antenna[i]) == int:
135        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
136        #        elif type(antenna[i]) == str:
137        #            tmpstr=tmpstr+antenna[i]+','
138        #        else:
139        #            raise TypeError('Bad antenna selection.')
140        #    antenna = tmpstr.rstrip(',')
141        parallactify = parallactify or rcParams['scantable.parallactify']
142        varlist = vars()
143        from asap._asap import stmath
144        self._math = stmath( rcParams['insitu'] )
145        if isinstance(filename, Scantable):
146            Scantable.__init__(self, filename)
147        else:
148            if isinstance(filename, str):
149                filename = os.path.expandvars(filename)
150                filename = os.path.expanduser(filename)
151                if not os.path.exists(filename):
152                    s = "File '%s' not found." % (filename)
153                    raise IOError(s)
154                if is_scantable(filename):
155                    ondisk = rcParams['scantable.storage'] == 'disk'
156                    Scantable.__init__(self, filename, ondisk)
157                    if unit is not None:
158                        self.set_fluxunit(unit)
159                    # do not reset to the default freqframe
160                    #self.set_freqframe(rcParams['scantable.freqframe'])
161                #elif os.path.isdir(filename) \
162                #         and not os.path.exists(filename+'/table.f1'):
163                elif is_ms(filename):
164                    # Measurement Set
165                    opts={'ms': {}}
166                    mskeys=['getpt','antenna']
167                    for key in mskeys:
168                        if key in args.keys():
169                            opts['ms'][key] = args[key]
170                    #self._fill([filename], unit, average, getpt, antenna)
171                    self._fill([filename], unit, average, opts)
172                elif os.path.isfile(filename):
173                    #self._fill([filename], unit, average, getpt, antenna)
174                    self._fill([filename], unit, average)
175                else:
176                    msg = "The given file '%s' is not a valid " \
177                          "asap table." % (filename)
178                    raise IOError(msg)
179            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
180                  and isinstance(filename[-1], str):
181                #self._fill(filename, unit, average, getpt, antenna)
182                self._fill(filename, unit, average)
183        self.parallactify(parallactify)
184        self._add_history("scantable", varlist)
185
186    @asaplog_post_dec
187    def save(self, name=None, format=None, overwrite=False):
188        """\
189        Store the scantable on disk. This can be an asap (aips++) Table,
190        SDFITS or MS2 format.
191
192        Parameters:
193
194            name:        the name of the output file. For format "ASCII"
195                         this is the root file name (data in 'name'.txt
196                         and header in 'name'_header.txt)
197
198            format:      an optional file format. Default is ASAP.
199                         Allowed are:
200
201                            * 'ASAP' (save as ASAP [aips++] Table),
202                            * 'SDFITS' (save as SDFITS file)
203                            * 'ASCII' (saves as ascii text file)
204                            * 'MS2' (saves as a casacore MeasurementSet V2)
205                            * 'FITS' (save as image FITS - not readable by CLASS)
206                            * 'CLASS' (save as FITS readable by CLASS)
207
208            overwrite:   If True, overwrite the file if it exists.
209                         The default (False) is to return with a warning
210                         without writing the output. USE WITH CARE.
211
212        Example::
213
214            scan.save('myscan.asap')
215            scan.save('myscan.sdfits', 'SDFITS')
216
217        """
218        from os import path
219        format = format or rcParams['scantable.save']
220        suffix = '.'+format.lower()
221        if name is None or name == "":
222            name = 'scantable'+suffix
223            msg = "No filename given. Using default name %s..." % name
224            asaplog.push(msg)
225        name = path.expandvars(name)
226        if path.isfile(name) or path.isdir(name):
227            if not overwrite:
228                msg = "File %s exists." % name
229                raise IOError(msg)
230        format2 = format.upper()
231        if format2 == 'ASAP':
232            self._save(name)
233        else:
234            from asap._asap import stwriter as stw
235            writer = stw(format2)
236            writer.write(self, name)
237        return
238
239    def copy(self):
240        """Return a copy of this scantable.
241
242        *Note*:
243
244            This makes a full (deep) copy. scan2 = scan1 makes a reference.
245
246        Example::
247
248            copiedscan = scan.copy()
249
250        """
251        sd = scantable(Scantable._copy(self))
252        return sd
253
254    def drop_scan(self, scanid=None):
255        """\
256        Return a new scantable where the specified scan number(s) have
257        been dropped.
258
259        Parameters:
260
261            scanid:    a (list of) scan number(s)
262
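        Example (illustrative)::

            # drop scans 0 and 3, keeping everything else
            newscan = scan.drop_scan([0, 3])
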
263        """
264        from asap import _is_sequence_or_number as _is_valid
265        from asap import _to_list
266        from asap import unique
267        if not _is_valid(scanid):
268            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
269        scanid = _to_list(scanid)
270        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
271        for sid in scanid: allscans.remove(sid)
272        if len(allscans) == 0:
273            raise ValueError("Can't remove all scans")
274        sel = selector(scans=allscans)
275        return self._select_copy(sel)
276
277    def _select_copy(self, selection):
278        orig = self.get_selection()
279        self.set_selection(orig+selection)
280        cp = self.copy()
281        self.set_selection(orig)
282        return cp
283
284    def get_scan(self, scanid=None):
285        """\
286        Return a specific scan (by scanno) or collection of scans (by
287        source name) in a new scantable.
288
289        *Note*:
290
291            See scantable.drop_scan() for the inverse operation.
292
293        Parameters:
294
295            scanid:    a (list of) scanno or a source name, unix-style
296                       patterns are accepted for source name matching, e.g.
297                       '*_R' gets all 'ref' scans
298
299        Example::
300
301            # get all scans containing the source '323p459'
302            newscan = scan.get_scan('323p459')
303            # get all 'off' scans
304            refscans = scan.get_scan('*_R')
305            # get a subset of scans by scanno (as listed in scan.summary())
306            newscan = scan.get_scan([0, 2, 7, 10])
307
308        """
309        if scanid is None:
310            raise RuntimeError( 'Please specify a scan no or name to '
311                                'retrieve from the scantable' )
312        try:
313            bsel = self.get_selection()
314            sel = selector()
315            if type(scanid) is str:
316                sel.set_name(scanid)
317                return self._select_copy(sel)
318            elif type(scanid) is int:
319                sel.set_scans([scanid])
320                return self._select_copy(sel)
321            elif type(scanid) is list:
322                sel.set_scans(scanid)
323                return self._select_copy(sel)
324            else:
325                msg = "Illegal scanid type, use 'int' or 'list' of ints."
326                raise TypeError(msg)
327        except RuntimeError:
328            raise
329
330    def __str__(self):
331        return Scantable._summary(self, True)
332
333    def summary(self, filename=None):
334        """\
335        Print a summary of the contents of this scantable.
336
337        Parameters:
338
339            filename:    the name of a file to write the output to
340                         Default - no file output
341
342        """
343        info = Scantable._summary(self, True)
344        if filename is not None:
345            if filename is "":
346                filename = 'scantable_summary.txt'
347            from os.path import expandvars, isdir
348            filename = expandvars(filename)
349            if not isdir(filename):
350                data = open(filename, 'w')
351                data.write(info)
352                data.close()
353            else:
354                msg = "Illegal file name '%s'." % (filename)
355                raise IOError(msg)
356        return page(info)
357
358    def get_spectrum(self, rowno):
359        """Return the spectrum for the current row in the scantable as a list.
360
361        Parameters:
362
363             rowno:   the row number to retrieve the spectrum from
364
365        """
366        return self._getspectrum(rowno)
367
368    def get_mask(self, rowno):
369        """Return the mask for the current row in the scantable as a list.
370
371        Parameters:
372
373             rowno:   the row number to retrieve the mask from
374
375        """
376        return self._getmask(rowno)
377
378    def set_spectrum(self, spec, rowno):
379        """Set the spectrum for the current row in the scantable.
380
381        Parameters:
382
383             spec:   the new spectrum
384
385             rowno:  the row number to set the spectrum for
386
387        """
388        assert(len(spec) == self.nchan())
389        return self._setspectrum(spec, rowno)
390
391    def get_coordinate(self, rowno):
392        """Return the (spectral) coordinate for a given 'rowno'.
393
394        *Note*:
395
396            * This coordinate is only valid until a scantable method modifies
397              the frequency axis.
398            * This coordinate contains the original frequency set-up,
399              NOT the new frame. The conversions, however, are done using the
400              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
401              use scantable.freq_align first. Without it there is no closure,
402              i.e.::
403
404                  c = myscan.get_coordinate(0)
405                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
406
407        Parameters:
408
409             rowno:    the row number for the spectral coordinate
410
411        """
412        return coordinate(Scantable.get_coordinate(self, rowno))
413
414    def get_selection(self):
415        """\
416        Get the selection object currently set on this scantable.
417
418        Example::
419
420            sel = scan.get_selection()
421            sel.set_ifs(0)              # select IF 0
422            scan.set_selection(sel)     # apply modified selection
423
424        """
425        return selector(self._getselection())
426
427    def set_selection(self, selection=None, **kw):
428        """\
429        Select a subset of the data. All following operations on this scantable
430        are only applied to this selection.
431
432        Parameters:
433
434            selection:    a selector object (default: unset the selection), or
435                          any combination of "pols", "ifs", "beams", "scans",
436                          "cycles", "name", "query"
437
438        Examples::
439
440            sel = selector()         # create a selection object
441            sel.set_scans([0, 3])    # select SCANNO 0 and 3
442            scan.set_selection(sel)  # set the selection
443            scan.summary()           # will only print summary of scanno 0 and 3
444            scan.set_selection()     # unset the selection
445            # or the equivalent
446            scan.set_selection(scans=[0,3])
447            scan.summary()           # will only print summary of scanno 0 and 3
448            scan.set_selection()     # unset the selection
449
450        """
451        if selection is None:
452            # reset
453            if len(kw) == 0:
454                selection = selector()
455            else:
456                # try keywords
457                for k in kw:
458                    if k not in selector.fields:
459                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
460                selection = selector(**kw)
461        self._setselection(selection)
462
463    def get_row(self, row=0, insitu=None):
464        """\
465        Select a row in the scantable.
466        Return a scantable with a single row.
467
468        Parameters:
469
470            row:    row no of integration, default is 0.
471            insitu: if False a new scantable is returned. Otherwise, the
472                    selection is done in-situ. The default is taken from .asaprc
473                    (False)
474
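        Example (illustrative)::

            # return integration 5 as a new single-row scantable
            onerow = scan.get_row(row=5, insitu=False)
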
475        """
476        if insitu is None: insitu = rcParams['insitu']
477        if not insitu:
478            workscan = self.copy()
479        else:
480            workscan = self
481        # Select a row
482        sel=selector()
483        sel.set_scans([workscan.getscan(row)])
484        sel.set_cycles([workscan.getcycle(row)])
485        sel.set_beams([workscan.getbeam(row)])
486        sel.set_ifs([workscan.getif(row)])
487        sel.set_polarisations([workscan.getpol(row)])
488        sel.set_name(workscan._getsourcename(row))
489        workscan.set_selection(sel)
490        if not workscan.nrow() == 1:
491            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
492            raise RuntimeError(msg)
493        del sel
494        if insitu:
495            self._assign(workscan)
496        else:
497            return workscan
498
499    @asaplog_post_dec
500    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
501        """\
502        Determine the specified statistic of the current beam/if/pol.
503        Takes a 'mask' as an optional parameter to specify which
504        channels should be excluded.
505
506        Parameters:
507
508            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
509                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
510
511            mask:    an optional mask specifying where the statistic
512                     should be determined.
513
514            form:    format string to print statistic values
515
516            row:     row number of spectrum to process.
517                     (default is None: for all rows)
518
519        Example::
520            scan.set_unit('channel')
521            msk = scan.create_mask([100, 200], [500, 600])
522            scan.stats(stat='mean', mask=msk)
523
524        """
525        mask = mask or []
526        if not self._check_ifs():
527            raise ValueError("Cannot apply mask as the IFs have different "
528                             "number of channels. Please use setselection() "
529                             "to select individual IFs")
530        rtnabc = False
531        if stat.lower().endswith('_abc'): rtnabc = True
532        getchan = False
533        if stat.lower().startswith('min') or stat.lower().startswith('max'):
534            chan = self._math._minmaxchan(self, mask, stat)
535            getchan = True
536            statvals = []
537        if not rtnabc:
538            if row == None:
539                statvals = self._math._stats(self, mask, stat)
540            else:
541                statvals = self._math._statsrow(self, mask, stat, int(row))
542
543        #def cb(i):
544        #    return statvals[i]
545
546        #return self._row_callback(cb, stat)
547
548        label=stat
549        #callback=cb
550        out = ""
551        #outvec = []
552        sep = '-'*50
553
554        if row == None:
555            rows = xrange(self.nrow())
556        elif isinstance(row, int):
557            rows = [ row ]
558
559        for i in rows:
560            refstr = ''
561            statunit= ''
562            if getchan:
563                qx, qy = self.chan2data(rowno=i, chan=chan[i])
564                if rtnabc:
565                    statvals.append(qx['value'])
566                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
567                    statunit= '['+qx['unit']+']'
568                else:
569                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
570
571            tm = self._gettime(i)
572            src = self._getsourcename(i)
573            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
574            out += 'Time[%s]:\n' % (tm)
575            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
576            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
577            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
578            #outvec.append(callback(i))
579            if len(rows) > 1:
580                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
581                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
582            else:
583                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
584                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
585            out +=  sep+"\n"
586
587        import os
588        if os.environ.has_key( 'USER' ):
589            usr = os.environ['USER']
590        else:
591            import commands
592            usr = commands.getoutput( 'whoami' )
593        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
594        f = open(tmpfile,'w')
595        print >> f, sep
596        print >> f, ' %s %s' % (label, statunit)
597        print >> f, sep
598        print >> f, out
599        f.close()
600        f = open(tmpfile,'r')
601        x = f.readlines()
602        f.close()
603        asaplog.push(''.join(x), False)
604
605        return statvals
606
607    def chan2data(self, rowno=0, chan=0):
608        """\
609        Returns channel/frequency/velocity and spectral value
610        at an arbitrary row and channel in the scantable.
611
612        Parameters:
613
614            rowno:   a row number in the scantable. Default is the
615                     first row, i.e. rowno=0
616
617            chan:    a channel in the scantable. Default is the first
618                     channel, i.e. chan=0
619
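        Example (illustrative)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            # qx holds the abcissa value/unit, qy the spectral value/unit
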
620        """
621        if isinstance(rowno, int) and isinstance(chan, int):
622            qx = {'unit': self.get_unit(),
623                  'value': self._getabcissa(rowno)[chan]}
624            qy = {'unit': self.get_fluxunit(),
625                  'value': self._getspectrum(rowno)[chan]}
626            return qx, qy
627
628    def stddev(self, mask=None):
629        """\
630        Determine the standard deviation of the current beam/if/pol.
631        Takes a 'mask' as an optional parameter to specify which
632        channels should be excluded.
633
634        Parameters:
635
636            mask:    an optional mask specifying where the standard
637                     deviation should be determined.
638
639        Example::
640
641            scan.set_unit('channel')
642            msk = scan.create_mask([100, 200], [500, 600])
643            scan.stddev(mask=msk)
644
645        """
646        return self.stats(stat='stddev', mask=mask);
647
648
649    def get_column_names(self):
650        """\
651        Return a list of column names, which can be used for selection.
652        """
653        return list(Scantable.get_column_names(self))
654
655    def get_tsys(self, row=-1):
656        """\
657        Return the System temperatures.
658
659        Parameters:
660
661            row:    the rowno to get the information for. (default all rows)
662
663        Returns:
664
665            a list of Tsys values for the current selection
666
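        Example (illustrative)::

            tsys_all = scan.get_tsys()      # one value per row
            tsys0 = scan.get_tsys(row=0)    # a single row
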
667        """
668        if row > -1:
669            return self._get_column(self._gettsys, row)
670        return self._row_callback(self._gettsys, "Tsys")
671
672
673    def get_weather(self, row=-1):
674        """\
675        Return the weather information.
676
677        Parameters:
678
679            row:    the rowno to get the information for. (default all rows)
680
681        Returns:
682
683            a dict or a list of dicts of values for the current selection
684
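        Example (an illustrative sketch; assumes the weather columns are
        filled for the data)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure'], w['humidity']
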
685        """
686
687        values = self._get_column(self._get_weather, row)
688        if row > -1:
689            return {'temperature': values[0],
690                    'pressure': values[1], 'humidity' : values[2],
691                    'windspeed' : values[3], 'windaz' : values[4]
692                    }
693        else:
694            out = []
695            for r in values:
696
697                out.append({'temperature': r[0],
698                            'pressure': r[1], 'humidity' : r[2],
699                            'windspeed' : r[3], 'windaz' : r[4]
700                    })
701            return out
702
703    def _row_callback(self, callback, label):
704        out = ""
705        outvec = []
706        sep = '-'*50
707        for i in range(self.nrow()):
708            tm = self._gettime(i)
709            src = self._getsourcename(i)
710            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
711            out += 'Time[%s]:\n' % (tm)
712            if self.nbeam(-1) > 1:
713                out +=  ' Beam[%d] ' % (self.getbeam(i))
714            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
715            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
716            outvec.append(callback(i))
717            out += '= %3.3f\n' % (outvec[i])
718            out +=  sep+'\n'
719
720        asaplog.push(sep)
721        asaplog.push(" %s" % (label))
722        asaplog.push(sep)
723        asaplog.push(out)
724        asaplog.post()
725        return outvec
726
727    def _get_column(self, callback, row=-1, *args):
728        """Apply 'callback' to a single row (row >= 0) or to every row
729        (row == -1), passing any extra positional arguments on to it."""
730        if row == -1:
731            return [callback(i, *args) for i in range(self.nrow())]
732        else:
733            if  0 <= row < self.nrow():
734                return callback(row, *args)
735
736
737    def get_time(self, row=-1, asdatetime=False, prec=0):
738        """\
739        Get a list of time stamps for the observations.
740        Return a datetime object or a string (default) for each
741        integration time stamp in the scantable.
742
743        Parameters:
744
745            row:          row no of integration. Default -1 return all rows
746
747            asdatetime:   return values as datetime objects rather than strings
748
749            prec:         number of digits shown. This follows the MVTime
750                          precision convention: 0<prec<3 gives dates with hh
751                          only, prec<5 adds mm, prec<7 (or prec=0) adds ss,
752                          and prec>6 adds prec-6 sub-second digits (hh:mm:ss.tt...)
753
754        """
755        #from time import strptime
756        from datetime import datetime
757        times = self._get_column(self._gettime, row, prec)
758        if not asdatetime:
759            return times
760        #format = "%Y/%m/%d/%H:%M:%S"
761        format = "%Y/%m/%d/%H:%M:%S.%f"
762        if prec < 7:
763            nsub = 1 + (((6-prec)/2) % 3)
764            substr = [".%f","%S","%M"]
765            for i in range(nsub):
766                format = format.replace(substr[i],"")
767        if isinstance(times, list):
768            #return [datetime(*strptime(i, format)[:6]) for i in times]
769            return [datetime.strptime(i, format) for i in times]
770        else:
771            #return datetime(*strptime(times, format)[:6])
772            return datetime.strptime(times, format)
773
774
775    def get_inttime(self, row=-1):
776        """\
777        Get a list of integration times for the observations.
778        Return a time in seconds for each integration in the scantable.
779
780        Parameters:
781
782            row:    row no of integration. Default -1 return all rows.
783
784        """
785        return self._get_column(self._getinttime, row)
786
787
788    def get_sourcename(self, row=-1):
789        """\
790        Get a list of source names for the observations.
791        Return a string for each integration in the scantable.
792        Parameters:
793
794            row:    row no of integration. Default -1 return all rows.
795
796        """
797        return self._get_column(self._getsourcename, row)
798
799    def get_elevation(self, row=-1):
800        """\
801        Get a list of elevations for the observations.
802        Return a float for each integration in the scantable.
803
804        Parameters:
805
806            row:    row no of integration. Default -1 return all rows.
807
808        """
809        return self._get_column(self._getelevation, row)
810
811    def get_azimuth(self, row=-1):
812        """\
813        Get a list of azimuths for the observations.
814        Return a float for each integration in the scantable.
815
816        Parameters:
817            row:    row no of integration. Default -1 return all rows.
818
819        """
820        return self._get_column(self._getazimuth, row)
821
822    def get_parangle(self, row=-1):
823        """\
824        Get a list of parallactic angles for the observations.
825        Return a float for each integration in the scantable.
826
827        Parameters:
828
829            row:    row no of integration. Default -1 return all rows.
830
831        """
832        return self._get_column(self._getparangle, row)
833
834    def get_direction(self, row=-1):
835        """
836        Get a list of Positions on the sky (direction) for the observations.
837        Return a string for each integration in the scantable.
838
839        Parameters:
840
841            row:    row no of integration. Default -1 return all rows
842
843        """
844        return self._get_column(self._getdirection, row)
845
846    def get_directionval(self, row=-1):
847        """\
848        Get a list of Positions on the sky (direction) for the observations.
849        Return a float for each integration in the scantable.
850
851        Parameters:
852
853            row:    row no of integration. Default -1 return all rows
854
855        """
856        return self._get_column(self._getdirectionvec, row)
857
858    @asaplog_post_dec
859    def set_unit(self, unit='channel'):
860        """\
861        Set the unit for all following operations on this scantable
862
863        Parameters:
864
865            unit:    optional unit, default is 'channel'. Use one of '*Hz',
866                     'km/s', 'channel' or equivalent ''
867
868        """
869        varlist = vars()
870        if unit in ['', 'pixel', 'channel']:
871            unit = ''
872        inf = list(self._getcoordinfo())
873        inf[0] = unit
874        self._setcoordinfo(inf)
875        self._add_history("set_unit", varlist)
876
877    @asaplog_post_dec
878    def set_instrument(self, instr):
879        """\
880        Set the instrument for subsequent processing.
881
882        Parameters:
883
884            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
885                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
886
887        """
888        self._setInstrument(instr)
889        self._add_history("set_instument", vars())
890
891    @asaplog_post_dec
892    def set_feedtype(self, feedtype):
893        """\
894        Overwrite the feed type, which might not be set correctly.
895
896        Parameters:
897
898            feedtype:     'linear' or 'circular'
899
900        """
901        self._setfeedtype(feedtype)
902        self._add_history("set_feedtype", vars())
903
904    @asaplog_post_dec
905    def set_doppler(self, doppler='RADIO'):
906        """\
907        Set the doppler for all following operations on this scantable.
908
909        Parameters:
910
911            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
912
913        """
914        varlist = vars()
915        inf = list(self._getcoordinfo())
916        inf[2] = doppler
917        self._setcoordinfo(inf)
918        self._add_history("set_doppler", vars())
919
920    @asaplog_post_dec
921    def set_freqframe(self, frame=None):
922        """\
923        Set the frame type of the Spectral Axis.
924
925        Parameters:
926
927            frame:   an optional frame type, default 'LSRK'. Valid frames are:
928                     'TOPO', 'LSRD', 'LSRK', 'BARY',
929                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
930
931        Example::
932
933            scan.set_freqframe('BARY')
934
935        """
936        frame = frame or rcParams['scantable.freqframe']
937        varlist = vars()
938        # "REST" is not implemented in casacore
939        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
940        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
941        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
942                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
943
944        if frame in valid:
945            inf = list(self._getcoordinfo())
946            inf[1] = frame
947            self._setcoordinfo(inf)
948            self._add_history("set_freqframe", varlist)
949        else:
950            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
951            raise TypeError(msg)
952
953    @asaplog_post_dec
954    def set_dirframe(self, frame=""):
955        """\
956        Set the frame type of the Direction on the sky.
957
958        Parameters:
959
960            frame:   an optional frame type, default ''. Valid frames are:
961                     'J2000', 'B1950', 'GALACTIC'
962
963        Example::
964
965            scan.set_dirframe('GALACTIC')
966
967        """
968        varlist = vars()
969        Scantable.set_dirframe(self, frame)
970        self._add_history("set_dirframe", varlist)
971
972    def get_unit(self):
973        """\
974        Get the default unit set in this scantable
975
976        Returns:
977
978            A unit string
979
980        """
981        inf = self._getcoordinfo()
982        unit = inf[0]
983        if unit == '': unit = 'channel'
984        return unit
985
986    @asaplog_post_dec
987    def get_abcissa(self, rowno=0):
988        """\
989        Get the abcissa in the current coordinate setup for the currently
990        selected Beam/IF/Pol
991
992        Parameters:
993
994            rowno:    an optional row number in the scantable. Default is the
995                      first row, i.e. rowno=0
996
997        Returns:
998
999            The abcissa values and the format string (as a dictionary)
1000
1001        """
1002        abc = self._getabcissa(rowno)
1003        lbl = self._getabcissalabel(rowno)
1004        return abc, lbl
1005
1006    @asaplog_post_dec
1007    def flag(self, mask=None, unflag=False):
1008        """\
1009        Flag the selected data using an optional channel mask.
1010
1011        Parameters:
1012
1013            mask:   an optional channel mask, created with create_mask. Default
1014                    (no mask) is all channels.
1015
1016            unflag:    if True, unflag the data
1017
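        Example (illustrative)::

            # flag using a channel mask created with create_mask
            msk = scan.create_mask([100, 200])
            scan.flag(mask=msk)
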
1018        """
1019        varlist = vars()
1020        mask = mask or []
1021        self._flag(mask, unflag)
1022        self._add_history("flag", varlist)
1023
1024    @asaplog_post_dec
1025    def flag_row(self, rows=[], unflag=False):
1026        """\
1027        Flag the selected data in row-based manner.
1028
1029        Parameters:
1030
1031            rows:   list of row numbers to be flagged. Default is no row
1032                    (must be explicitly specified to execute row-based flagging).
1033
1034            unflag: if True, unflag the data.
1035
1036        """
1037        varlist = vars()
1038        self._flag_row(rows, unflag)
1039        self._add_history("flag_row", varlist)
1040
1041    @asaplog_post_dec
1042    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1043        """\
1044        Flag the selected data outside a specified range (channel-based)
1045
1046        Parameters:
1047
1048            uthres:      upper threshold.
1049
1050            dthres:      lower threshold
1051
1052            clipoutside: True for flagging data outside the range [dthres:uthres].
1053                         False for flagging data inside the range.
1054
1055            unflag:      if True, unflag the data.
1056
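        Example (illustrative)::

            # flag data outside the range [-5.0, 5.0]
            scan.clip(uthres=5.0, dthres=-5.0)
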
1057        """
1058        varlist = vars()
1059        self._clip(uthres, dthres, clipoutside, unflag)
1060        self._add_history("clip", varlist)
1061
1062    @asaplog_post_dec
1063    def lag_flag(self, start, end, unit="MHz", insitu=None):
1064        """\
1065        Flag the data in 'lag' space by providing a frequency to remove.
1066        Flagged data in the scantable gets interpolated over the region.
1067        No taper is applied.
1068
1069        Parameters:
1070
1071            start:    the start frequency (really a period within the
1072                      bandwidth)  or period to remove
1073
1074            end:      the end frequency or period to remove
1075
1076            unit:     the frequency unit (default "MHz") or "" for
1077                      explicit lag channels
1078
1079        *Notes*:
1080
1081            It is recommended to flag edges of the band or strong
1082            signals beforehand.
1083
1084        """
1085        if insitu is None: insitu = rcParams['insitu']
1086        self._math._setinsitu(insitu)
1087        varlist = vars()
1088        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1089        if not (unit == "" or base.has_key(unit)):
1090            raise ValueError("%s is not a valid unit." % unit)
1091        if unit == "":
1092            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1093        else:
1094            s = scantable(self._math._lag_flag(self, start*base[unit],
1095                                               end*base[unit], "frequency"))
1096        s._add_history("lag_flag", varlist)
1097        if insitu:
1098            self._assign(s)
1099        else:
1100            return s
1101
1102    @asaplog_post_dec
1103    def create_mask(self, *args, **kwargs):
1104        """\
1105        Compute and return a mask based on [min, max] windows.
1106        The specified windows are to be INCLUDED, when the mask is
1107        applied.
1108
1109        Parameters:
1110
1111            [min, max], [min2, max2], ...
1112                Pairs of start/end points (inclusive) specifying the regions
1113                to be masked
1114
1115            invert:     optional argument. If specified as True,
1116                        return an inverted mask, i.e. the regions
1117                        specified are EXCLUDED
1118
1119            row:        create the mask using the specified row for
1120                        unit conversions (default is row=0);
1121                        only necessary if frequency varies over rows.
1122
1123        Examples::
1124
1125            scan.set_unit('channel')
1126            # a)
1127            msk = scan.create_mask([400, 500], [800, 900])
1128            # masks everything outside 400 and 500
1129            # and 800 and 900 in the unit 'channel'
1130
1131            # b)
1132            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1133            # masks the regions between 400 and 500
1134            # and 800 and 900 in the unit 'channel'
1135
1136            # c)
1137            #mask only channel 400
1138            msk =  scan.create_mask([400])
1139
1140        """
1141        row = kwargs.get("row", 0)
1142        data = self._getabcissa(row)
1143        u = self._getcoordinfo()[0]
1144        if u == "":
1145            u = "channel"
1146        msg = "The current mask window unit is %s" % u
1147        i = self._check_ifs()
1148        if not i:
1149            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1150        asaplog.push(msg)
1151        n = self.nchan()
1152        msk = _n_bools(n, False)
1153        # test if args is a 'list' or a 'normal *args - UGLY!!!
1154
1155        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1156             and args or args[0]
1157        for window in ws:
1158            if len(window) == 1:
1159                window = [window[0], window[0]]
1160            if len(window) == 0 or  len(window) > 2:
1161                raise ValueError("A window needs to be defined as [start(, end)]")
1162            if window[0] > window[1]:
1163                tmp = window[0]
1164                window[0] = window[1]
1165                window[1] = tmp
1166            for i in range(n):
1167                if data[i] >= window[0] and data[i] <= window[1]:
1168                    msk[i] = True
1169        if kwargs.has_key('invert'):
1170            if kwargs.get('invert'):
1171                msk = mask_not(msk)
1172        return msk
1173
1174    def get_masklist(self, mask=None, row=0, silent=False):
1175        """\
1176        Compute and return a list of mask windows, [min, max].
1177
1178        Parameters:
1179
1180            mask:       channel mask, created with create_mask.
1181
1182            row:        calculate the masklist using the specified row
1183                        for unit conversions (default is row=0);
1184                        only necessary if frequency varies over rows.
1185
1186        Returns:
1187
1188            [min, max], [min2, max2], ...
1189                Pairs of start/end points (inclusive) specifying
1190                the masked regions
1191
1192        """
1193        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1194            raise TypeError("The mask should be list or tuple.")
1195        if len(mask) < 2:
1196            raise TypeError("The mask elements should be > 1")
1197        if self.nchan() != len(mask):
1198            msg = "Number of channels in scantable != number of mask elements"
1199            raise TypeError(msg)
1200        data = self._getabcissa(row)
1201        u = self._getcoordinfo()[0]
1202        if u == "":
1203            u = "channel"
1204        msg = "The current mask window unit is %s" % u
1205        i = self._check_ifs()
1206        if not i:
1207            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1208        if not silent:
1209            asaplog.push(msg)
1210        masklist=[]
1211        ist, ien = None, None
1212        ist, ien=self.get_mask_indices(mask)
1213        if ist is not None and ien is not None:
1214            for i in xrange(len(ist)):
1215                range=[data[ist[i]],data[ien[i]]]
1216                range.sort()
1217                masklist.append([range[0],range[1]])
1218        return masklist
1219
1220    def get_mask_indices(self, mask=None):
1221        """\
1222        Compute and return lists of mask start indices and mask end indices.
1223
1224        Parameters:
1225
1226            mask:       channel mask, created with create_mask.
1227
1228        Returns:
1229
1230            List of mask start indices and that of mask end indices,
1231            i.e., [istart1,istart2,....], [iend1,iend2,....].
1232
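        Example (illustrative, in channel units)::

            msk = scan.create_mask([10, 20], [40, 50])
            istart, iend = scan.get_mask_indices(msk)
            # istart -> [10, 40], iend -> [20, 50]
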
1233        """
1234        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1235            raise TypeError("The mask should be list or tuple.")
1236        if len(mask) < 2:
1237            raise TypeError("The mask elements should be > 1")
1238        istart=[]
1239        iend=[]
1240        if mask[0]: istart.append(0)
1241        for i in range(len(mask)-1):
1242            if not mask[i] and mask[i+1]:
1243                istart.append(i+1)
1244            elif mask[i] and not mask[i+1]:
1245                iend.append(i)
1246        if mask[len(mask)-1]: iend.append(len(mask)-1)
1247        if len(istart) != len(iend):
1248            raise RuntimeError("Numbers of mask start != mask end.")
1249        for i in range(len(istart)):
1250            if istart[i] > iend[i]:
1251                raise RuntimeError("Mask start index > mask end index")
1252                break
1253        return istart,iend
1254
1255#    def get_restfreqs(self):
1256#        """
1257#        Get the restfrequency(s) stored in this scantable.
1258#        The return value(s) are always of unit 'Hz'
1259#        Parameters:
1260#            none
1261#        Returns:
1262#            a list of doubles
1263#        """
1264#        return list(self._getrestfreqs())
1265
1266    def get_restfreqs(self, ids=None):
1267        """\
1268        Get the restfrequency(s) stored in this scantable.
1269        The return value(s) are always of unit 'Hz'
1270
1271        Parameters:
1272
1273            ids: (optional) a list of MOLECULE_IDs for which the rest
1274                 frequency(s) are to be retrieved
1275
1276        Returns:
1277
1278            dictionary containing ids and a list of doubles for each id
1279
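        Example (illustrative)::

            rf_all = scan.get_restfreqs()       # dict keyed by MOLECULE_ID
            rf0 = scan.get_restfreqs(ids=0)     # list of values for one id
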
1280        """
1281        if ids is None:
1282            rfreqs={}
1283            idlist = self.getmolnos()
1284            for i in idlist:
1285                rfreqs[i]=list(self._getrestfreqs(i))
1286            return rfreqs
1287        else:
1288            if type(ids)==list or type(ids)==tuple:
1289                rfreqs={}
1290                for i in ids:
1291                    rfreqs[i]=list(self._getrestfreqs(i))
1292                return rfreqs
1293            else:
1294                return list(self._getrestfreqs(ids))
1295            #return list(self._getrestfreqs(ids))
1296
1297    def set_restfreqs(self, freqs=None, unit='Hz'):
1298        """\
1299        Set or replace the rest frequency.
1300        If the 'freqs' argument holds a scalar,
1301        that rest frequency will be applied to all the selected
1302        data.  If the 'freqs' argument holds
1303        a vector, it MUST be of equal or smaller length than
1304        the number of IFs (and the available rest frequencies will be
1305        replaced by this vector).  In this case, *all* data have
1306        the rest frequency set per IF according
1307        to the corresponding value you give in the 'freqs' vector.
1308        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
1309        IF 1 gets restfreq 2e9.
1310
1311        You can also specify the frequencies via a linecatalog.
1312
1313        Parameters:
1314
1315            freqs:   list of rest frequency values or string identifiers
1316
1317            unit:    unit for rest frequency (default 'Hz')
1318
1319
1320        Example::
1321
1322            # set the given restfrequency for all currently selected IFs
1323            scan.set_restfreqs(freqs=1.4e9)
1324            # set restfrequencies for the n IFs  (n > 1) in the order of the
1325            # list, i.e.
1326            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1327            # len(list_of_restfreqs) == nIF
1328            # for nIF == 1 the following will set multiple restfrequency for
1329            # that IF
1330            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1331            # set multiple restfrequencies per IF. as a list of lists where
1332            # the outer list has nIF elements, the inner lists are arbitrary
1333            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1334
1335       *Note*:
1336
1337            To do more sophisticated rest-frequency setting, e.g. on a
1338            source and IF basis, use scantable.set_selection() before using
1339            this function::
1340
1341                # provided your scantable is called scan
1342                selection = selector()
1343                selection.set_name("ORION*")
1344                selection.set_ifs([1])
1345                scan.set_selection(selection)
1346                scan.set_restfreqs(freqs=86.6e9)
1347
1348        """
1349        varlist = vars()
1350        from asap import linecatalog
1351        # simple  value
1352        if isinstance(freqs, int) or isinstance(freqs, float):
1353            self._setrestfreqs([freqs], [""], unit)
1354        # list of values
1355        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1356            # list values are scalars
1357            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1358                if len(freqs) == 1:
1359                    self._setrestfreqs(freqs, [""], unit)
1360                else:
1361                    # allow the 'old' mode of setting multiple IFs
1362                    sel = selector()
1363                    savesel = self._getselection()
1364                    iflist = self.getifnos()
1365                    if len(freqs)>len(iflist):
1366                        raise ValueError("number of elements in list of list "
1367                                         "exceeds the current IF selections")
1368                    iflist = self.getifnos()
1369                    for i, fval in enumerate(freqs):
1370                        sel.set_ifs(iflist[i])
1371                        self._setselection(sel)
1372                        self._setrestfreqs([fval], [""], unit)
1373                    self._setselection(savesel)
1374
1375            # list values are dicts, {'value': ..., 'name': ...}
1376            elif isinstance(freqs[-1], dict):
1377                values = []
1378                names = []
1379                for d in freqs:
1380                    values.append(d["value"])
1381                    names.append(d["name"])
1382                self._setrestfreqs(values, names, unit)
1383            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1384                sel = selector()
1385                savesel = self._getselection()
1386                iflist = self.getifnos()
1387                if len(freqs)>len(iflist):
1388                    raise ValueError("number of elements in list of list exceeds"
1389                                     " the current IF selections")
1390                for i, fval in enumerate(freqs):
1391                    sel.set_ifs(iflist[i])
1392                    self._setselection(sel)
1393                    self._setrestfreqs(fval, [""], unit)
1394                self._setselection(savesel)
1395        # freqs are to be taken from a linecatalog
1396        elif isinstance(freqs, linecatalog):
1397            sel = selector()
1398            savesel = self._getselection()
1399            for i in xrange(freqs.nrow()):
1400                sel.set_ifs(iflist[i])
1401                self._setselection(sel)
1402                self._setrestfreqs([freqs.get_frequency(i)],
1403                                   [freqs.get_name(i)], "MHz")
1404                # ensure that we are not iterating past nIF
1405                if i == self.nif()-1: break
1406            self._setselection(savesel)
1407        else:
1408            return
1409        self._add_history("set_restfreqs", varlist)
1410
1411    def shift_refpix(self, delta):
1412        """\
1413        Shift the reference pixel of the Spectral Coordinate by an
1414        integer amount.
1415
1416        Parameters:
1417
1418            delta:   the amount to shift by
1419
1420        *Note*:
1421
1422            Be careful using this with broadband data.
1423
1424        """
1425        Scantable.shift_refpix(self, delta)
1426
1427    @asaplog_post_dec
1428    def history(self, filename=None):
1429        """\
1430        Print the history. Optionally to a file.
1431
1432        Parameters:
1433
1434            filename:    The name of the file to save the history to.
1435
1436        """
1437        hist = list(self._gethistory())
1438        out = "-"*80
1439        for h in hist:
1440            if h.startswith("---"):
1441                out = "\n".join([out, h])
1442            else:
1443                items = h.split("##")
1444                date = items[0]
1445                func = items[1]
1446                items = items[2:]
1447                out += "\n"+date+"\n"
1448                out += "Function: %s\n  Parameters:" % (func)
1449                for i in items:
1450                    if i == '':
1451                        continue
1452                    s = i.split("=")
1453                    out += "\n   %s = %s" % (s[0], s[1])
1454                out = "\n".join([out, "-"*80])
1455        if filename is not None:
1456            if filename is "":
1457                filename = 'scantable_history.txt'
1458            import os
1459            filename = os.path.expandvars(os.path.expanduser(filename))
1460            if not os.path.isdir(filename):
1461                data = open(filename, 'w')
1462                data.write(out)
1463                data.close()
1464            else:
1465                msg = "Illegal file name '%s'." % (filename)
1466                raise IOError(msg)
1467        return page(out)
1468    #
1469    # Maths business
1470    #
1471    @asaplog_post_dec
1472    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1473        """\
1474        Return the (time) weighted average of a scan.
1475
1476        *Note*:
1477
1478            in channels only - align if necessary
1479
1480        Parameters:
1481
1482            mask:     an optional mask (only used for 'var' and 'tsys'
1483                      weighting)
1484
1485            scanav:   True averages each scan separately.
1486                      False (default) averages all scans together.
1487
1488            weight:   Weighting scheme.
1489                      'none'     (mean, no weight)
1490                      'var'      (1/var(spec) weighted)
1491                      'tsys'     (1/Tsys**2 weighted)
1492                      'tint'     (integration time weighted)
1493                      'tintsys'  (Tint/Tsys**2)
1494                      'median'   (median averaging)
1495                      The default is 'tint'
1496
1497            align:    align the spectra in velocity before averaging. It takes
1498                      the time of the first spectrum as reference time.
1499
1500        Example::
1501
1502            # time average the scantable without using a mask
1503            newscan = scan.average_time()
1504
1505        """
1506        varlist = vars()
1507        weight = weight or 'TINT'
1508        mask = mask or ()
1509        scanav = (scanav and 'SCAN') or 'NONE'
1510        scan = (self, )
1511
1512        if align:
1513            scan = (self.freq_align(insitu=False), )
1514        s = None
1515        if weight.upper() == 'MEDIAN':
1516            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1517                                                     scanav))
1518        else:
1519            s = scantable(self._math._average(scan, mask, weight.upper(),
1520                          scanav))
1521        s._add_history("average_time", varlist)
1522        return s
1523
1524    @asaplog_post_dec
1525    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1526        """\
1527        Return a scan where all spectra are converted to either
1528        Jansky or Kelvin depending upon the flux units of the scan table.
1529        By default the function tries to look the values up internally.
1530        If it can't find them (or if you want to override them), you must
1531        specify EITHER jyperk OR eta (and D which it will try to look up
1532        also if you don't set it). jyperk takes precedence if you set both.
1533
1534        Parameters:
1535
1536            jyperk:      the Jy / K conversion factor
1537
1538            eta:         the aperture efficiency
1539
1540            d:           the geometric diameter (metres)
1541
1542            insitu:      if False a new scantable is returned.
1543                         Otherwise, the scaling is done in-situ
1544                         The default is taken from .asaprc (False)
1545
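        Example (an illustrative sketch; the factor value is arbitrary)::

            # convert using an explicit Jy/K factor instead of the
            # internal lookup, returning a new scantable
            scan_jy = scan.convert_flux(jyperk=12.5, insitu=False)
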
1546        """
1547        if insitu is None: insitu = rcParams['insitu']
1548        self._math._setinsitu(insitu)
1549        varlist = vars()
1550        jyperk = jyperk or -1.0
1551        d = d or -1.0
1552        eta = eta or -1.0
1553        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1554        s._add_history("convert_flux", varlist)
1555        if insitu: self._assign(s)
1556        else: return s
1557
1558    @asaplog_post_dec
1559    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1560        """\
1561        Return a scan after applying a gain-elevation correction.
1562        The correction can be made via either a polynomial or a
1563        table-based interpolation (and extrapolation if necessary).
1564        You specify polynomial coefficients, an ascii table or neither.
1565        If you specify neither, then a polynomial correction will be made
1566        with built in coefficients known for certain telescopes (an error
1567        will occur if the instrument is not known).
1568        The data and Tsys are *divided* by the scaling factors.
1569
1570        Parameters:
1571
1572            poly:        Polynomial coefficients (default None) to compute a
1573                         gain-elevation correction as a function of
1574                         elevation (in degrees).
1575
1576            filename:    The name of an ascii file holding correction factors.
1577                         The first row of the ascii file must give the column
1578                         names and these MUST include columns
1579                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1580                         by this) somewhere.
1581                         The second row must give the data type of the
1582                         column. Use 'R' for Real and 'I' for Integer.
1583                         An example file would be
1584                         (actual factors are arbitrary) :
1585
1586                         TIME ELEVATION FACTOR
1587                         R R R
1588                         0.1 0 0.8
1589                         0.2 20 0.85
1590                         0.3 40 0.9
1591                         0.4 60 0.85
1592                         0.5 80 0.8
1593                         0.6 90 0.75
1594
1595            method:      Interpolation method when correcting from a table.
1596                         Values are  "nearest", "linear" (default), "cubic"
1597                         and "spline"
1598
1599            insitu:      if False a new scantable is returned.
1600                         Otherwise, the scaling is done in-situ
1601                         The default is taken from .asaprc (False)
1602
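        Example (a sketch; the file name and coefficients are arbitrary)::

            # correct using an ascii table of factors
            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
            # or use explicit polynomial coefficients in elevation (degrees)
            corrected = scan.gain_el(poly=[0.5, 0.0125, -1e-4], insitu=False)
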
1603        """
1604
1605        if insitu is None: insitu = rcParams['insitu']
1606        self._math._setinsitu(insitu)
1607        varlist = vars()
1608        poly = poly or ()
1609        from os.path import expandvars
1610        filename = expandvars(filename)
1611        s = scantable(self._math._gainel(self, poly, filename, method))
1612        s._add_history("gain_el", varlist)
1613        if insitu:
1614            self._assign(s)
1615        else:
1616            return s
1617
1618    @asaplog_post_dec
1619    def freq_align(self, reftime=None, method='cubic', insitu=None):
1620        """\
1621        Return a scan where all rows have been aligned in frequency/velocity.
1622        The alignment frequency frame (e.g. LSRK) is that set by function
1623        set_freqframe.
1624
1625        Parameters:
1626
1627            reftime:     reference time to align at. By default, the time of
1628                         the first row of data is used.
1629
1630            method:      Interpolation method for regridding the spectra.
1631                         Choose from "nearest", "linear", "cubic" (default)
1632                         and "spline"
1633
1634            insitu:      if False a new scantable is returned.
1635                         Otherwise, the scaling is done in-situ
1636                         The default is taken from .asaprc (False)
1637
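        Example (illustrative)::

            # align all rows at the time of the first row, using the
            # default cubic interpolation, and return a new scantable
            aligned = scan.freq_align(insitu=False)
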
1638        """
1639        if insitu is None: insitu = rcParams["insitu"]
1640        self._math._setinsitu(insitu)
1641        varlist = vars()
1642        reftime = reftime or ""
1643        s = scantable(self._math._freq_align(self, reftime, method))
1644        s._add_history("freq_align", varlist)
1645        if insitu: self._assign(s)
1646        else: return s
1647
1648    @asaplog_post_dec
1649    def opacity(self, tau=None, insitu=None):
1650        """\
1651        Apply an opacity correction. The data
1652        and Tsys are multiplied by the correction factor.
1653
1654        Parameters:
1655
1656            tau:         (list of) opacity from which the correction factor is
1657                         exp(tau*ZD)
1658                         where ZD is the zenith-distance.
1659                         If a list is provided, it has to be of length nIF,
1660                         nIF*nPol or 1 and in order of IF/POL, e.g.
1661                         [opif0pol0, opif0pol1, opif1pol0 ...]
1662                         if tau is `None` the opacities are determined from a
1663                         model.
1664
1665            insitu:      if False a new scantable is returned.
1666                         Otherwise, the scaling is done in-situ
1667                         The default is taken from .asaprc (False)
1668
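        Example (a sketch; the opacity value is arbitrary)::

            # correct all spectra for a zenith opacity of 0.09
            corrected = scan.opacity(tau=0.09, insitu=False)
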
1669        """
1670        if insitu is None: insitu = rcParams['insitu']
1671        self._math._setinsitu(insitu)
1672        varlist = vars()
1673        if not hasattr(tau, "__len__"):
1674            tau = [tau]
1675        s = scantable(self._math._opacity(self, tau))
1676        s._add_history("opacity", varlist)
1677        if insitu: self._assign(s)
1678        else: return s
1679
1680    @asaplog_post_dec
1681    def bin(self, width=5, insitu=None):
1682        """\
1683        Return a scan where all spectra have been binned up.
1684
1685        Parameters:
1686
1687            width:       The bin width (default=5) in pixels
1688
1689            insitu:      if False a new scantable is returned.
1690                         Otherwise, the scaling is done in-situ
1691                         The default is taken from .asaprc (False)
1692
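        Example (illustrative)::

            # bin up the spectra by 10 channels and return a new scantable
            binned = scan.bin(width=10, insitu=False)
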
1693        """
1694        if insitu is None: insitu = rcParams['insitu']
1695        self._math._setinsitu(insitu)
1696        varlist = vars()
1697        s = scantable(self._math._bin(self, width))
1698        s._add_history("bin", varlist)
1699        if insitu:
1700            self._assign(s)
1701        else:
1702            return s
1703
1704    @asaplog_post_dec
1705    def resample(self, width=5, method='cubic', insitu=None):
1706        """\
1707        Return a scan where all spectra have been resampled (regridded) to a new bin width.
1708
1709        Parameters:
1710
1711            width:       The bin width (default=5) in pixels
1712
1713            method:      Interpolation method used for the resampling.
1714                         Values are  "nearest", "linear", "cubic" (default)
1715                         and "spline"
1716
1717            insitu:      if False a new scantable is returned.
1718                         Otherwise, the scaling is done in-situ
1719                         The default is taken from .asaprc (False)
1720
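        Example (illustrative; the width value is arbitrary)::

            # resample onto 2 pixel wide bins using spline interpolation
            resampled = scan.resample(width=2, method='spline', insitu=False)
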
1721        """
1722        if insitu is None: insitu = rcParams['insitu']
1723        self._math._setinsitu(insitu)
1724        varlist = vars()
1725        s = scantable(self._math._resample(self, method, width))
1726        s._add_history("resample", varlist)
1727        if insitu: self._assign(s)
1728        else: return s
1729
1730    @asaplog_post_dec
1731    def average_pol(self, mask=None, weight='none'):
1732        """\
1733        Average the Polarisations together.
1734
1735        Parameters:
1736
1737            mask:        An optional mask defining the region, where the
1738                         averaging will be applied. The output will have all
1739                         specified points masked.
1740
1741            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1742                         weighted), or 'tsys' (1/Tsys**2 weighted)
1743
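        Example (illustrative)::

            # average the polarisations together using 1/Tsys**2 weighting
            polavg = scan.average_pol(weight='tsys')
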
1744        """
1745        varlist = vars()
1746        mask = mask or ()
1747        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1748        s._add_history("average_pol", varlist)
1749        return s
1750
1751    @asaplog_post_dec
1752    def average_beam(self, mask=None, weight='none'):
1753        """\
1754        Average the Beams together.
1755
1756        Parameters:
1757            mask:        An optional mask defining the region, where the
1758                         averaging will be applied. The output will have all
1759                         specified points masked.
1760
1761            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1762                         weighted), or 'tsys' (1/Tsys**2 weighted)
1763
1764        """
1765        varlist = vars()
1766        mask = mask or ()
1767        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1768        s._add_history("average_beam", varlist)
1769        return s
1770
1771    def parallactify(self, pflag):
1772        """\
1773        Set a flag to indicate whether this data should be treated as having
1774        been 'parallactified' (total phase == 0.0)
1775
1776        Parameters:
1777
1778            pflag:  Bool indicating whether to turn this on (True) or
1779                    off (False)
1780
1781        """
1782        varlist = vars()
1783        self._parallactify(pflag)
1784        self._add_history("parallactify", varlist)
1785
1786    @asaplog_post_dec
1787    def convert_pol(self, poltype=None):
1788        """\
1789        Convert the data to a different polarisation type.
1790        Note that you will need cross-polarisation terms for most conversions.
1791
1792        Parameters:
1793
1794            poltype:    The new polarisation type. Valid types are:
1795                        "linear", "circular", "stokes" and "linpol"
1796
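        Example (illustrative)::

            # convert to Stokes parameters (requires cross-polarisation terms)
            stokes = scan.convert_pol('stokes')
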
1797        """
1798        varlist = vars()
1799        s = scantable(self._math._convertpol(self, poltype))
1800        s._add_history("convert_pol", varlist)
1801        return s
1802
1803    @asaplog_post_dec
1804    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1805        """\
1806        Smooth the spectrum by the specified kernel (conserving flux).
1807
1808        Parameters:
1809
1810            kernel:     The type of smoothing kernel. Select from
1811                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1812                        or 'poly'
1813
1814            width:      The width of the kernel in pixels. For hanning this is
1815                        ignored, otherwise it defaults to 5 pixels.
1816                        For 'gaussian' it is the Full Width Half
1817                        Maximum. For 'boxcar' it is the full width.
1818                        For 'rmedian' and 'poly' it is the half width.
1819
1820            order:      Optional parameter for 'poly' kernel (default is 2), to
1821                        specify the order of the polynomial. Ignored by all other
1822                        kernels.
1823
1824            plot:       plot the original and the smoothed spectra.
1825                        Each individual fit has to be approved by
1826                        typing 'y' or 'n'
1827
1828            insitu:     if False a new scantable is returned.
1829                        Otherwise, the scaling is done in-situ
1830                        The default is taken from .asaprc (False)
1831
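        Example (a sketch; the kernel width is arbitrary)::

            # smooth with a gaussian of FWHM 10 channels and return a
            # new scantable
            smoothed = scan.smooth(kernel='gaussian', width=10, insitu=False)
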
1832        """
1833        if insitu is None: insitu = rcParams['insitu']
1834        self._math._setinsitu(insitu)
1835        varlist = vars()
1836
1837        if plot: orgscan = self.copy()
1838
1839        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1840        s._add_history("smooth", varlist)
1841
1842        if plot:
1843            if rcParams['plotter.gui']:
1844                from asap.asaplotgui import asaplotgui as asaplot
1845            else:
1846                from asap.asaplot import asaplot
1847            self._p=asaplot()
1848            self._p.set_panels()
1849            ylab=s._get_ordinate_label()
1850            #self._p.palette(0,["#777777","red"])
1851            for r in xrange(s.nrow()):
1852                xsm=s._getabcissa(r)
1853                ysm=s._getspectrum(r)
1854                xorg=orgscan._getabcissa(r)
1855                yorg=orgscan._getspectrum(r)
1856                self._p.clear()
1857                self._p.hold()
1858                self._p.set_axes('ylabel',ylab)
1859                self._p.set_axes('xlabel',s._getabcissalabel(r))
1860                self._p.set_axes('title',s._getsourcename(r))
1861                self._p.set_line(label='Original',color="#777777")
1862                self._p.plot(xorg,yorg)
1863                self._p.set_line(label='Smoothed',color="red")
1864                self._p.plot(xsm,ysm)
1865                ### Ugly part for legend
1866                for i in [0,1]:
1867                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1868                self._p.release()
1869                ### Ugly part for legend
1870                self._p.subplots[0]['lines']=[]
1871                res = raw_input("Accept smoothing ([y]/n): ")
1872                if res.upper() == 'N':
1873                    s._setspectrum(yorg, r)
1874            self._p.unmap()
1875            self._p = None
1876            del orgscan
1877
1878        if insitu: self._assign(s)
1879        else: return s
1880
1881    @asaplog_post_dec
1882    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1883        """\
1884        Return a scan which has been baselined (all rows) by a polynomial.
1885       
1886        Parameters:
1887
1888            mask:       an optional mask
1889
1890            order:      the order of the polynomial (default is 0)
1891
1892            plot:       plot the fit and the residual. In this each
1893                        indivual fit has to be approved, by typing 'y'
1894                        or 'n'
1895
1896            uselin:     use linear polynomial fit
1897
1898            insitu:     if False a new scantable is returned.
1899                        Otherwise, the scaling is done in-situ
1900                        The default is taken from .asaprc (False)
1901
1902            rows:       row numbers of spectra to be processed.
1903                        (default is None: for all rows)
1904       
1905        Example::

1906            # return a scan baselined by a third order polynomial,
1907            # not using a mask
1908            bscan = scan.old_poly_baseline(order=3)
1909
1910        """
1911        if insitu is None: insitu = rcParams['insitu']
1912        if not insitu:
1913            workscan = self.copy()
1914        else:
1915            workscan = self
1916        varlist = vars()
1917        if mask is None:
1918            mask = [True for i in xrange(self.nchan())]
1919
1920        try:
1921            f = fitter()
1922            if uselin:
1923                f.set_function(lpoly=order)
1924            else:
1925                f.set_function(poly=order)
1926
1927            if rows is None:
1928                rows = xrange(workscan.nrow())
1929            elif isinstance(rows, int):
1930                rows = [ rows ]
1931           
1932            if len(rows) > 0:
1933                self.blpars = []
1934                self.masklists = []
1935                self.actualmask = []
1936           
1937            for r in rows:
1938                f.x = workscan._getabcissa(r)
1939                f.y = workscan._getspectrum(r)
1940                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1941                f.data = None
1942                f.fit()
1943                if plot:
1944                    f.plot(residual=True)
1945                    x = raw_input("Accept fit ( [y]/n ): ")
1946                    if x.upper() == 'N':
1947                        self.blpars.append(None)
1948                        self.masklists.append(None)
1949                        self.actualmask.append(None)
1950                        continue
1951                workscan._setspectrum(f.fitter.getresidual(), r)
1952                self.blpars.append(f.get_parameters())
1953                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1954                self.actualmask.append(f.mask)
1955
1956            if plot:
1957                f._p.unmap()
1958                f._p = None
1959            workscan._add_history("poly_baseline", varlist)
1960            if insitu:
1961                self._assign(workscan)
1962            else:
1963                return workscan
1964        except RuntimeError:
1965            msg = "The fit failed, possibly because it didn't converge."
1966            raise RuntimeError(msg)
1967
1968    @asaplog_post_dec
1969    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1970        """\
1971        Return a scan which has been baselined (all rows) by a polynomial.
1972        Parameters:
1973            mask:       an optional mask
1974            order:      the order of the polynomial (default is 0)
1975            plot:       plot the fit and the residual. Each
1976                        individual fit has to be approved by typing 'y'
1977                        or 'n'. Ignored if batch = True.
1978            batch:      if True a faster algorithm is used and logs
1979                        including the fit results are not output
1980                        (default is False)
1981            insitu:     if False a new scantable is returned.
1982                        Otherwise, the scaling is done in-situ
1983                        The default is taken from .asaprc (False)
1984            rows:       row numbers of spectra to be baselined.
1985                        (default is None: for all rows)
1986        Example::

1987            # return a scan baselined by a third order polynomial,
1988            # not using a mask
1989            bscan = scan.poly_baseline(order=3)
1990        """
1991       
1992        varlist = vars()
1993       
1994        if insitu is None: insitu = rcParams["insitu"]
1995        if insitu:
1996            workscan = self
1997        else:
1998            workscan = self.copy()
1999
2000        nchan = workscan.nchan()
2001       
2002        if mask is None:
2003            mask = [True for i in xrange(nchan)]
2004
2005        try:
2006            if rows is None:
2007                rows = xrange(workscan.nrow())
2008            elif isinstance(rows, int):
2009                rows = [ rows ]
2010           
2011            if len(rows) > 0:
2012                workscan.blpars = []
2013                workscan.masklists = []
2014                workscan.actualmask = []
2015
2016            if batch:
2017                workscan._poly_baseline_batch(mask, order)
2018            elif plot:
2019                f = fitter()
2020                f.set_function(lpoly=order)
2021                for r in rows:
2022                    f.x = workscan._getabcissa(r)
2023                    f.y = workscan._getspectrum(r)
2024                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2025                    f.data = None
2026                    f.fit()
2027                   
2028                    f.plot(residual=True)
2029                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2030                    if accept_fit.upper() == "N":
2031                        self.blpars.append(None)
2032                        self.masklists.append(None)
2033                        self.actualmask.append(None)
2034                        continue
2035                    workscan._setspectrum(f.fitter.getresidual(), r)
2036                    workscan.blpars.append(f.get_parameters())
2037                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2038                    workscan.actualmask.append(f.mask)
2039                   
2040                f._p.unmap()
2041                f._p = None
2042            else:
2043                for r in rows:
2044                    fitparams = workscan._poly_baseline(mask, order, r)
2045                    params = fitparams.getparameters()
2046                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2047                    errors = fitparams.geterrors()
2048                    fmask = mask_and(mask, workscan._getmask(r))
2049
2050                    workscan.blpars.append({"params":params,
2051                                            "fixed": fitparams.getfixedparameters(),
2052                                            "formatted":fmtd, "errors":errors})
2053                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2054                    workscan.actualmask.append(fmask)
2055                   
2056                    asaplog.push(fmtd)
2057           
2058            workscan._add_history("poly_baseline", varlist)
2059           
2060            if insitu:
2061                self._assign(workscan)
2062            else:
2063                return workscan
2064           
2065        except RuntimeError, e:
2066            msg = "The fit failed, possibly because it didn't converge."
2067            if rcParams["verbose"]:
2068                asaplog.push(str(e))
2069                asaplog.push(str(msg))
2070                return
2071            else:
2072                raise RuntimeError(str(e)+'\n'+msg)
2073
2074
2075    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2076                           threshold=3, chan_avg_limit=1, plot=False,
2077                           insitu=None, rows=None):
2078        """\
2079        Return a scan which has been baselined (all rows) by a polynomial.
2080        Spectral lines are detected first using linefinder and masked out
2081        to avoid them affecting the baseline solution.
2082
2083        Parameters:
2084
2085            mask:       an optional mask retrieved from scantable
2086
2087            edge:       an optional number of channels to drop at the edge of
2088                        the spectrum. If only one value is
2089                        specified, the same number will be dropped from
2090                        both sides of the spectrum. Default is to keep
2091                        all channels. Nested tuples represent individual
2092                        edge selection for different IFs (the number of spectral
2093                        channels can differ between IFs)
2094
2095            order:      the order of the polynomial (default is 0)
2096
2097            threshold:  the threshold used by line finder. It is better to
2098                        keep it large as only strong lines affect the
2099                        baseline solution.
2100
2101            chan_avg_limit:
2102                        a maximum number of consecutive spectral channels to
2103                        average during the search of weak and broad lines.
2104                        The default is no averaging (and no search for weak
2105                        lines). If such lines can affect the fitted baseline
2106                        (e.g. a high order polynomial is fitted), increase this
2107                        parameter (usually values up to 8 are reasonable). Most
2108                        users of this method should find the default value
2109                        sufficient.
2110
2111            plot:       plot the fit and the residual. Each
2112                        individual fit has to be approved by typing 'y'
2113                        or 'n'
2114
2115            insitu:     if False a new scantable is returned.
2116                        Otherwise, the scaling is done in-situ
2117                        The default is taken from .asaprc (False)
2118            rows:       row numbers of spectra to be processed.
2119                        (default is None: for all rows)
2120
2121
2122        Example::
2123
2124            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2125
2126        """
2127        if insitu is None: insitu = rcParams['insitu']
2128        varlist = vars()
2129        from asap.asaplinefind import linefinder
2130        from asap import _is_sequence_or_number as _is_valid
2131
2132        # check whether edge is set up for each IF individually
2133        individualedge = False;
2134        if len(edge) > 1:
2135            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2136                individualedge = True;
2137
2138        if not _is_valid(edge, int) and not individualedge:
2139            raise ValueError, "Parameter 'edge' has to be an integer or a \
2140            pair of integers specified as a tuple. Nested tuples are allowed \
2141            to make individual selection for different IFs."
2142
2143        curedge = (0, 0)
2144        if individualedge:
2145            for edgepar in edge:
2146                if not _is_valid(edgepar, int):
2147                    raise ValueError, "Each element of the 'edge' tuple has \
2148                                       to be a pair of integers or an integer."
2149        else:
2150            curedge = edge;
2151
2152        if not insitu:
2153            workscan = self.copy()
2154        else:
2155            workscan = self
2156
2157        # setup fitter
2158        f = fitter()
2159        f.set_function(lpoly=order)
2160
2161        # setup line finder
2162        fl = linefinder()
2163        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2164
2165        fl.set_scan(workscan)
2166
2167        if mask is None:
2168            mask = _n_bools(workscan.nchan(), True)
2169       
2170        if rows is None:
2171            rows = xrange(workscan.nrow())
2172        elif isinstance(rows, int):
2173            rows = [ rows ]
2174       
2175        # Save parameters of baseline fits & masklists as a class attribute.
2176        # NOTICE: It does not reflect changes in scantable!
2177        if len(rows) > 0:
2178            self.blpars=[]
2179            self.masklists=[]
2180            self.actualmask=[]
2181        asaplog.push("Processing:")
2182        for r in rows:
2183            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2184                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2185                 workscan.getpol(r), workscan.getcycle(r))
2186            asaplog.push(msg, False)
2187
2188            # figure out edge parameter
2189            if individualedge:
2190                if len(edge) <= workscan.getif(r):
2191                    raise RuntimeError, "Number of edge elements appears to " \
2192                                        "be less than the number of IFs"
2193                curedge = edge[workscan.getif(r)]
2194
2195            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2196
2197            # setup line finder
2198            fl.find_lines(r, actualmask, curedge)
2199           
2200            f.x = workscan._getabcissa(r)
2201            f.y = workscan._getspectrum(r)
2202            f.mask = fl.get_mask()
2203            f.data = None
2204            f.fit()
2205
2206            # Show mask list
2207            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2208            msg = "mask range: "+str(masklist)
2209            asaplog.push(msg, False)
2210
2211            if plot:
2212                f.plot(residual=True)
2213                x = raw_input("Accept fit ( [y]/n ): ")
2214                if x.upper() == 'N':
2215                    self.blpars.append(None)
2216                    self.masklists.append(None)
2217                    self.actualmask.append(None)
2218                    continue
2219
2220            workscan._setspectrum(f.fitter.getresidual(), r)
2221            self.blpars.append(f.get_parameters())
2222            self.masklists.append(masklist)
2223            self.actualmask.append(f.mask)
2224        if plot:
2225            f._p.unmap()
2226            f._p = None
2227        workscan._add_history("auto_poly_baseline", varlist)
2228        if insitu:
2229            self._assign(workscan)
2230        else:
2231            return workscan
2232
2233    @asaplog_post_dec
2234    def rotate_linpolphase(self, angle):
2235        """\
2236        Rotate the phase of the complex polarization O=Q+iU correlation.
2237        This is always done in situ in the raw data.  So if you call this
2238        function more than once then each call rotates the phase further.
2239
2240        Parameters:
2241
2242            angle:   The angle (degrees) to rotate (add) by.
2243
2244        Example::
2245
2246            scan.rotate_linpolphase(2.3)
2247
2248        """
2249        varlist = vars()
2250        self._math._rotate_linpolphase(self, angle)
2251        self._add_history("rotate_linpolphase", varlist)
2252        return
2253
2254    @asaplog_post_dec
2255    def rotate_xyphase(self, angle):
2256        """\
2257        Rotate the phase of the XY correlation.  This is always done in situ
2258        in the data.  So if you call this function more than once
2259        then each call rotates the phase further.
2260
2261        Parameters:
2262
2263            angle:   The angle (degrees) to rotate (add) by.
2264
2265        Example::
2266
2267            scan.rotate_xyphase(2.3)
2268
2269        """
2270        varlist = vars()
2271        self._math._rotate_xyphase(self, angle)
2272        self._add_history("rotate_xyphase", varlist)
2273        return
2274
2275    @asaplog_post_dec
2276    def swap_linears(self):
2277        """\
2278        Swap the linear polarisations XX and YY, or more generally the first
2279        two polarisations, as this also works for circulars.
2280        """
2281        varlist = vars()
2282        self._math._swap_linears(self)
2283        self._add_history("swap_linears", varlist)
2284        return
2285
2286    @asaplog_post_dec
2287    def invert_phase(self):
2288        """\
2289        Invert the phase of the complex polarisation
2290        """
2291        varlist = vars()
2292        self._math._invert_phase(self)
2293        self._add_history("invert_phase", varlist)
2294        return
2295
2296    @asaplog_post_dec
2297    def add(self, offset, insitu=None):
2298        """\
2299        Return a scan where all spectra have the offset added
2300
2301        Parameters:
2302
2303            offset:      the offset
2304
2305            insitu:      if False a new scantable is returned.
2306                         Otherwise, the scaling is done in-situ
2307                         The default is taken from .asaprc (False)
2308
2309        """
2310        if insitu is None: insitu = rcParams['insitu']
2311        self._math._setinsitu(insitu)
2312        varlist = vars()
2313        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2314        s._add_history("add", varlist)
2315        if insitu:
2316            self._assign(s)
2317        else:
2318            return s
2319
2320    @asaplog_post_dec
2321    def scale(self, factor, tsys=True, insitu=None):
2322        """\
2323
2324        Return a scan where all spectra are scaled by the given 'factor'
2325
2326        Parameters:
2327
2328            factor:      the scaling factor (float or 1D float list)
2329
2330            insitu:      if False a new scantable is returned.
2331                         Otherwise, the scaling is done in-situ
2332                         The default is taken from .asaprc (False)
2333
2334            tsys:        if True (default) then apply the operation to Tsys
2335                         as well as the data
2336
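        Example (illustrative; the factor is arbitrary)::

            # scale the spectra (but not Tsys) by a factor of 2
            scaled = scan.scale(2.0, tsys=False, insitu=False)
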
2337        """
2338        if insitu is None: insitu = rcParams['insitu']
2339        self._math._setinsitu(insitu)
2340        varlist = vars()
2341        s = None
2342        import numpy
2343        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2344            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2345                from asapmath import _array2dOp
2346                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2347            else:
2348                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2349        else:
2350            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2351        s._add_history("scale", varlist)
2352        if insitu:
2353            self._assign(s)
2354        else:
2355            return s
2356
2357    def set_sourcetype(self, match, matchtype="pattern",
2358                       sourcetype="reference"):
2359        """\
2360        Set the type of the source to be a source or reference scan
2361        using the provided pattern.
2362
2363        Parameters:
2364
2365            match:          a Unix style pattern, regular expression or selector
2366
2367            matchtype:      'pattern' (default) UNIX style pattern or
2368                            'regex' regular expression
2369
2370            sourcetype:     the type of the source to use (source/reference)
2371
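        Example (a sketch; the pattern is arbitrary)::

            # mark all scans whose source name ends in '_R' as references
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
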
2372        """
2373        varlist = vars()
2374        basesel = self.get_selection()
2375        stype = -1
2376        if sourcetype.lower().startswith("r"):
2377            stype = 1
2378        elif sourcetype.lower().startswith("s"):
2379            stype = 0
2380        else:
2381            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2382        if matchtype.lower().startswith("p"):
2383            matchtype = "pattern"
2384        elif matchtype.lower().startswith("r"):
2385            matchtype = "regex"
2386        else:
2387            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2388        sel = selector()
2389        if isinstance(match, selector):
2390            sel = match
2391        else:
2392            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2393        self.set_selection(basesel+sel)
2394        self._setsourcetype(stype)
2395        self.set_selection(basesel)
2396        self._add_history("set_sourcetype", varlist)
2397
2398    @asaplog_post_dec
2399    @preserve_selection
2400    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2401        """\
2402        This function allows you to build quotients automatically.
2403        It assumes the observation has the same number of
2404        "ons" and "offs".
2405
2406        Parameters:
2407
2408            preserve:       you can preserve (default) the continuum or
2409                            remove it.  The equations used are
2410
2411                            preserve: Output = Toff * (on/off) - Toff
2412
2413                            remove:   Output = Toff * (on/off) - Ton
2414
2415            mode:           the on/off detection mode
2416                            'paired' (default)
2417                            identifies 'off' scans by the
2418                            trailing '_R' (Mopra/Parkes) or
2419                            '_e'/'_w' (Tid) and matches
2420                            on/off pairs from the observing pattern
2421                            'time'
2422                            finds the closest off in time
2423
2424        .. todo:: verify argument is not implemented
2425
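        Example (illustrative)::

            # build quotients from paired on/off scans, preserving the
            # continuum
            quot = scan.auto_quotient(preserve=True, mode='paired')
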
2426        """
2427        varlist = vars()
2428        modes = ["time", "paired"]
2429        if not mode in modes:
2430            msg = "please provide valid mode. Valid modes are %s" % (modes)
2431            raise ValueError(msg)
2432        s = None
2433        if mode.lower() == "paired":
2434            sel = self.get_selection()
2435            sel.set_query("SRCTYPE==psoff")
2436            self.set_selection(sel)
2437            offs = self.copy()
2438            sel.set_query("SRCTYPE==pson")
2439            self.set_selection(sel)
2440            ons = self.copy()
2441            s = scantable(self._math._quotient(ons, offs, preserve))
2442        elif mode.lower() == "time":
2443            s = scantable(self._math._auto_quotient(self, mode, preserve))
2444        s._add_history("auto_quotient", varlist)
2445        return s
2446
2447    @asaplog_post_dec
2448    def mx_quotient(self, mask = None, weight='median', preserve=True):
2449        """\
2450        Form a quotient using "off" beams when observing in "MX" mode.
2451
2452        Parameters:
2453
2454            mask:           an optional mask to be used when weight == 'stddev'
2455
2456            weight:         How to average the off beams.  Default is 'median'.
2457
2458            preserve:       you can preserve (default) the continuum or
2459                            remove it.  The equations used are:
2460
2461                                preserve: Output = Toff * (on/off) - Toff
2462
2463                                remove:   Output = Toff * (on/off) - Ton
2464
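        Example (illustrative)::

            # form the MX quotient, median-averaging the off beams
            quot = scan.mx_quotient(weight='median', preserve=True)
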
2465        """
2466        mask = mask or ()
2467        varlist = vars()
2468        on = scantable(self._math._mx_extract(self, 'on'))
2469        preoff = scantable(self._math._mx_extract(self, 'off'))
2470        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2471        from asapmath  import quotient
2472        q = quotient(on, off, preserve)
2473        q._add_history("mx_quotient", varlist)
2474        return q
2475
2476    @asaplog_post_dec
2477    def freq_switch(self, insitu=None):
2478        """\
2479        Apply frequency switching to the data.
2480
2481        Parameters:
2482
2483            insitu:      if False a new scantable is returned.
2484                         Otherwise, the switching is done in-situ
2485                         The default is taken from .asaprc (False)
2486
2487        """
2488        if insitu is None: insitu = rcParams['insitu']
2489        self._math._setinsitu(insitu)
2490        varlist = vars()
2491        s = scantable(self._math._freqswitch(self))
2492        s._add_history("freq_switch", varlist)
2493        if insitu:
2494            self._assign(s)
2495        else:
2496            return s
2497
2498    @asaplog_post_dec
2499    def recalc_azel(self):
2500        """Recalculate the azimuth and elevation for each position."""
2501        varlist = vars()
2502        self._recalcazel()
2503        self._add_history("recalc_azel", varlist)
2504        return
2505
2506    @asaplog_post_dec
2507    def __add__(self, other):
2508        varlist = vars()
2509        s = None
2510        if isinstance(other, scantable):
2511            s = scantable(self._math._binaryop(self, other, "ADD"))
2512        elif isinstance(other, float):
2513            s = scantable(self._math._unaryop(self, other, "ADD", False))
2514        else:
2515            raise TypeError("Other input is not a scantable or float value")
2516        s._add_history("operator +", varlist)
2517        return s
2518
2519    @asaplog_post_dec
2520    def __sub__(self, other):
2521        """
2522        implicit on all axes and on Tsys
2523        """
2524        varlist = vars()
2525        s = None
2526        if isinstance(other, scantable):
2527            s = scantable(self._math._binaryop(self, other, "SUB"))
2528        elif isinstance(other, float):
2529            s = scantable(self._math._unaryop(self, other, "SUB", False))
2530        else:
2531            raise TypeError("Other input is not a scantable or float value")
2532        s._add_history("operator -", varlist)
2533        return s
2534
2535    @asaplog_post_dec
2536    def __mul__(self, other):
2537        """
2538        implicit on all axes and on Tsys
2539        """
2540        varlist = vars()
2541        s = None
2542        if isinstance(other, scantable):
2543            s = scantable(self._math._binaryop(self, other, "MUL"))
2544        elif isinstance(other, float):
2545            s = scantable(self._math._unaryop(self, other, "MUL", False))
2546        else:
2547            raise TypeError("Other input is not a scantable or float value")
2548        s._add_history("operator *", varlist)
2549        return s
2550
2551
2552    @asaplog_post_dec
2553    def __div__(self, other):
2554        """
2555        implicit on all axes and on Tsys
2556        """
2557        varlist = vars()
2558        s = None
2559        if isinstance(other, scantable):
2560            s = scantable(self._math._binaryop(self, other, "DIV"))
2561        elif isinstance(other, float):
2562            if other == 0.0:
2563                raise ZeroDivisionError("Dividing by zero is not recommended")
2564            s = scantable(self._math._unaryop(self, other, "DIV", False))
2565        else:
2566            raise TypeError("Other input is not a scantable or float value")
2567        s._add_history("operator /", varlist)
2568        return s
2569
2570    @asaplog_post_dec
2571    def get_fit(self, row=0):
2572        """\
2573        Print or return the stored fits for a row in the scantable
2574
2575        Parameters:
2576
2577            row:    the row which the fit has been applied to.
2578
2579        """
2580        if row >= self.nrow():
2581            return
2582        from asap.asapfit import asapfit
2583        fit = asapfit(self._getfit(row))
2584        asaplog.push( '%s' %(fit) )
2585        return fit.as_dict()
2586
2587    def flag_nans(self):
2588        """\
2589        Utility function to flag NaN values in the scantable.
2590        """
2591        import numpy
2592        basesel = self.get_selection()
2593        for i in range(self.nrow()):
2594            sel = self.get_row_selector(i)
2595            self.set_selection(basesel+sel)
2596            nans = numpy.isnan(self._getspectrum(0))
2597            if numpy.any(nans):
2598                bnans = [ bool(v) for v in nans]
2599                self.flag(bnans)
2600        self.set_selection(basesel)
2601
2602    def get_row_selector(self, rowno):
2603        return selector(beams=self.getbeam(rowno),
2604                        ifs=self.getif(rowno),
2605                        pols=self.getpol(rowno),
2606                        scans=self.getscan(rowno),
2607                        cycles=self.getcycle(rowno))
2608
2609    def _add_history(self, funcname, parameters):
2610        if not rcParams['scantable.history']:
2611            return
2612        # create date
2613        sep = "##"
2614        from datetime import datetime
2615        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2616        hist = dstr+sep
2617        hist += funcname+sep#cdate+sep
2618        if parameters.has_key('self'): del parameters['self']
2619        for k, v in parameters.iteritems():
2620            if type(v) is dict:
2621                for k2, v2 in v.iteritems():
2622                    hist += k2
2623                    hist += "="
2624                    if isinstance(v2, scantable):
2625                        hist += 'scantable'
2626                    elif k2 == 'mask':
2627                        if isinstance(v2, list) or isinstance(v2, tuple):
2628                            hist += str(self._zip_mask(v2))
2629                        else:
2630                            hist += str(v2)
2631                    else:
2632                        hist += str(v2)
2633            else:
2634                hist += k
2635                hist += "="
2636                if isinstance(v, scantable):
2637                    hist += 'scantable'
2638                elif k == 'mask':
2639                    if isinstance(v, list) or isinstance(v, tuple):
2640                        hist += str(self._zip_mask(v))
2641                    else:
2642                        hist += str(v)
2643                else:
2644                    hist += str(v)
2645            hist += sep
2646        hist = hist[:-2] # remove trailing '##'
2647        self._addhistory(hist)
2648
2649
2650    def _zip_mask(self, mask):
2651        mask = list(mask)
2652        i = 0
2653        segments = []
2654        while mask[i:].count(1):
2655            i += mask[i:].index(1)
2656            if mask[i:].count(0):
2657                j = i + mask[i:].index(0)
2658            else:
2659                j = len(mask)
2660            segments.append([i, j])
2661            i = j
2662        return segments
2663
2664    def _get_ordinate_label(self):
2665        fu = "("+self.get_fluxunit()+")"
2666        import re
2667        lbl = "Intensity"
2668        if re.match(".K.", fu):
2669            lbl = "Brightness Temperature "+ fu
2670        elif re.match(".Jy.", fu):
2671            lbl = "Flux density "+ fu
2672        return lbl
2673
2674    def _check_ifs(self):
2675        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2676        nchans = filter(lambda t: t > 0, nchans)
2677        return (sum(nchans)/len(nchans) == nchans[0])
2678
2679    @asaplog_post_dec
2680    #def _fill(self, names, unit, average, getpt, antenna):
2681    def _fill(self, names, unit, average, opts={}):
2682        first = True
2683        fullnames = []
2684        for name in names:
2685            name = os.path.expandvars(name)
2686            name = os.path.expanduser(name)
2687            if not os.path.exists(name):
2688                msg = "File '%s' does not exists" % (name)
2689                raise IOError(msg)
2690            fullnames.append(name)
2691        if average:
2692            asaplog.push('Auto averaging integrations')
2693        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2694        for name in fullnames:
2695            tbl = Scantable(stype)
2696            r = filler(tbl)
2697            rx = rcParams['scantable.reference']
2698            r.setreferenceexpr(rx)
2699            msg = "Importing %s..." % (name)
2700            asaplog.push(msg, False)
2701            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2702            r.open(name, opts)# antenna, -1, -1, getpt)
2703            r.fill()
2704            if average:
2705                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2706            if not first:
2707                tbl = self._math._merge([self, tbl])
2708            Scantable.__init__(self, tbl)
2709            r.close()
2710            del r, tbl
2711            first = False
2712            #flush log
2713        asaplog.post()
2714        if unit is not None:
2715            self.set_fluxunit(unit)
2716        if not is_casapy():
2717            self.set_freqframe(rcParams['scantable.freqframe'])
2718
2719    def __getitem__(self, key):
2720        if key < 0:
2721            key += self.nrow()
2722        if key >= self.nrow():
2723            raise IndexError("Row index out of range.")
2724        return self._getspectrum(key)
2725
2726    def __setitem__(self, key, value):
2727        if key < 0:
2728            key += self.nrow()
2729        if key >= self.nrow():
2730            raise IndexError("Row index out of range.")
2731        if not hasattr(value, "__len__") or \
2732                len(value) > self.nchan(self.getif(key)):
2733            raise ValueError("Spectrum length doesn't match.")
2734        return self._setspectrum(value, key)
2735
2736    def __len__(self):
2737        return self.nrow()
2738
2739    def __iter__(self):
2740        for i in range(len(self)):
2741            yield self[i]