source: trunk/python/scantable.py @ 1948

Last change on this file since 1948 was 1948, checked in by Kana Sugimoto, 13 years ago

New Development: No

JIRA Issue: No

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: changed the default value of prec to -1 in scantable.get_time

Test Programs:

Put in Release Notes: No

Module(s):

Description:

Changed the default value of the parameter prec to -1 in scantable.get_time.
When prec=-1, the necessary time precision is calculated automatically from the
minimum integration time in the scantable.
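
A minimal illustration (assuming an existing scantable instance 'scan'):

    scan.get_time()         # prec=-1 (new default): precision derived from the minimum integration time
    scan.get_time(prec=7)   # explicit precision, as before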


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
22def preserve_selection(func):
23    @wraps_dec(func)
24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
30        return val
31    return wrap
32
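# A minimal sketch (hypothetical helper) showing how preserve_selection is
# intended to be used: the wrapped callable may change the selection of its
# scantable argument; the original selection is restored when it returns.
#
#     @preserve_selection
#     def _mean_of_scan0(scan):
#         scan.set_selection(selector(scans=[0]))
#         return scan.stats('mean')
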
33def is_scantable(filename):
34    """Is the given file a scantable?
35
36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
57
58def is_ms(filename):
59    """Is the given file a MeasurementSet?
60
61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
78   
79class scantable(Scantable):
80    """\
81        The ASAP container for scans (single-dish data).
82    """
83
84    @asaplog_post_dec
85    #def __init__(self, filename, average=None, unit=None, getpt=None,
86    #             antenna=None, parallactify=None):
87    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
88        """\
89        Create a scantable from a saved one or make a reference
90
91        Parameters:
92
93            filename:     the name of an asap table on disk
94                          or
95                          the name of a rpfits/sdfits/ms file
96                          (integrations within scans are auto averaged
97                          and the whole file is read) or
98                          [advanced] a reference to an existing scantable
99
100            average:      average all integrations within a scan on read.
101                          The default (True) is taken from .asaprc.
102
103            unit:         brightness unit; must be consistent with K or Jy.
104                          Over-rides the default selected by the filler
105                          (input rpfits/sdfits/ms) or replaces the value
106                          in existing scantables
107
108            getpt:        for MeasurementSet input data only:
109                          If True, all pointing data are filled.
110                          The default is False, which makes loading the
111                          MS data faster in some cases.
112
113            antenna:      for MeasurementSet input data only:
114                          Antenna selection. integer (id) or string (name or id).
115
116            parallactify: Indicate that the data have been parallactified.
117                          The default is taken from the .asaprc file.
118
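        Example (an illustrative sketch; the file name is hypothetical)::

            # read a file and average integrations within each scan
            scan = scantable('myobservation.rpf', average=True)
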
119        """
120        if average is None:
121            average = rcParams['scantable.autoaverage']
122        #if getpt is None:
123        #    getpt = True
124        #if antenna is not None:
125        #    asaplog.push("Antenna selection currently unsupported."
126        #                 "Using ''")
127        #    asaplog.post('WARN')
128        #if antenna is None:
129        #    antenna = ''
130        #elif type(antenna) == int:
131        #    antenna = '%s' % antenna
132        #elif type(antenna) == list:
133        #    tmpstr = ''
134        #    for i in range( len(antenna) ):
135        #        if type(antenna[i]) == int:
136        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
137        #        elif type(antenna[i]) == str:
138        #            tmpstr=tmpstr+antenna[i]+','
139        #        else:
140        #            raise TypeError('Bad antenna selection.')
141        #    antenna = tmpstr.rstrip(',')
142        parallactify = parallactify or rcParams['scantable.parallactify']
143        varlist = vars()
144        from asap._asap import stmath
145        self._math = stmath( rcParams['insitu'] )
146        if isinstance(filename, Scantable):
147            Scantable.__init__(self, filename)
148        else:
149            if isinstance(filename, str):
150                filename = os.path.expandvars(filename)
151                filename = os.path.expanduser(filename)
152                if not os.path.exists(filename):
153                    s = "File '%s' not found." % (filename)
154                    raise IOError(s)
155                if is_scantable(filename):
156                    ondisk = rcParams['scantable.storage'] == 'disk'
157                    Scantable.__init__(self, filename, ondisk)
158                    if unit is not None:
159                        self.set_fluxunit(unit)
160                    # do not reset to the default freqframe
161                    #self.set_freqframe(rcParams['scantable.freqframe'])
162                #elif os.path.isdir(filename) \
163                #         and not os.path.exists(filename+'/table.f1'):
164                elif is_ms(filename):
165                    # Measurement Set
166                    opts={'ms': {}}
167                    mskeys=['getpt','antenna']
168                    for key in mskeys:
169                        if key in args.keys():
170                            opts['ms'][key] = args[key]
171                    #self._fill([filename], unit, average, getpt, antenna)
172                    self._fill([filename], unit, average, opts)
173                elif os.path.isfile(filename):
174                    #self._fill([filename], unit, average, getpt, antenna)
175                    self._fill([filename], unit, average)
176                else:
177                    msg = "The given file '%s' is not a valid " \
178                          "asap table." % (filename)
179                    raise IOError(msg)
180            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
181                  and isinstance(filename[-1], str):
182                #self._fill(filename, unit, average, getpt, antenna)
183                self._fill(filename, unit, average)
184        self.parallactify(parallactify)
185        self._add_history("scantable", varlist)
186
187    @asaplog_post_dec
188    def save(self, name=None, format=None, overwrite=False):
189        """\
190        Store the scantable on disk. This can be an asap (aips++) Table,
191        SDFITS or MS2 format.
192
193        Parameters:
194
195            name:        the name of the outputfile. For format "ASCII"
196            name:        the name of the output file. For format "ASCII"
197                         and header in 'name'_header.txt)
198
199            format:      an optional file format. Default is ASAP.
200                         Allowed are:
201
202                            * 'ASAP' (save as ASAP [aips++] Table),
203                            * 'SDFITS' (save as SDFITS file)
204                            * 'ASCII' (saves as ascii text file)
205                            * 'MS2' (saves as a casacore MeasurementSet V2)
206                            * 'FITS' (save as image FITS - not readable by class)
207                            * 'CLASS' (save as FITS readable by CLASS)
208
209            overwrite:   whether the file should be overwritten if it exists.
210                         The default (False) is to return with a warning
211                         without writing the output. USE WITH CARE.
212
213        Example::
214
215            scan.save('myscan.asap')
216            scan.save('myscan.sdfits', 'SDFITS')
217
218        """
219        from os import path
220        format = format or rcParams['scantable.save']
221        suffix = '.'+format.lower()
222        if name is None or name == "":
223            name = 'scantable'+suffix
224            msg = "No filename given. Using default name %s..." % name
225            asaplog.push(msg)
226        name = path.expandvars(name)
227        if path.isfile(name) or path.isdir(name):
228            if not overwrite:
229                msg = "File %s exists." % name
230                raise IOError(msg)
231        format2 = format.upper()
232        if format2 == 'ASAP':
233            self._save(name)
234        else:
235            from asap._asap import stwriter as stw
236            writer = stw(format2)
237            writer.write(self, name)
238        return
239
240    def copy(self):
241        """Return a copy of this scantable.
242
243        *Note*:
244
245            This makes a full (deep) copy. scan2 = scan1 makes a reference.
246
247        Example::
248
249            copiedscan = scan.copy()
250
251        """
252        sd = scantable(Scantable._copy(self))
253        return sd
254
255    def drop_scan(self, scanid=None):
256        """\
257        Return a new scantable where the specified scan number(s) have
258        been dropped.
259
260        Parameters:
261
262            scanid:    a (list of) scan number(s)
263
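        Example (illustrative)::

            # drop scans 2 and 3, keeping all other scans
            newscan = scan.drop_scan([2, 3])
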
264        """
265        from asap import _is_sequence_or_number as _is_valid
266        from asap import _to_list
267        from asap import unique
268        if not _is_valid(scanid):
269            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
270        scanid = _to_list(scanid)
271        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
272        for sid in scanid: allscans.remove(sid)
273        if len(allscans) == 0:
274            raise ValueError("Can't remove all scans")
275        sel = selector(scans=allscans)
276        return self._select_copy(sel)
277
278    def _select_copy(self, selection):
279        orig = self.get_selection()
280        self.set_selection(orig+selection)
281        cp = self.copy()
282        self.set_selection(orig)
283        return cp
284
285    def get_scan(self, scanid=None):
286        """\
287        Return a specific scan (by scanno) or collection of scans (by
288        source name) in a new scantable.
289
290        *Note*:
291
292            See scantable.drop_scan() for the inverse operation.
293
294        Parameters:
295
296            scanid:    a (list of) scanno or a source name, unix-style
297                       patterns are accepted for source name matching, e.g.
298                       '*_R' gets all 'ref' scans
299
300        Example::
301
302            # get all scans containing the source '323p459'
303            newscan = scan.get_scan('323p459')
304            # get all 'off' scans
305            refscans = scan.get_scan('*_R')
306            # get a subset of scans by scanno (as listed in scan.summary())
307            newscan = scan.get_scan([0, 2, 7, 10])
308
309        """
310        if scanid is None:
311            raise RuntimeError( 'Please specify a scan no or name to '
312                                'retrieve from the scantable' )
313        try:
314            bsel = self.get_selection()
315            sel = selector()
316            if type(scanid) is str:
317                sel.set_name(scanid)
318                return self._select_copy(sel)
319            elif type(scanid) is int:
320                sel.set_scans([scanid])
321                return self._select_copy(sel)
322            elif type(scanid) is list:
323                sel.set_scans(scanid)
324                return self._select_copy(sel)
325            else:
326                msg = "Illegal scanid type, use 'int' or 'list' of ints."
327                raise TypeError(msg)
328        except RuntimeError:
329            raise
330
331    def __str__(self):
332        return Scantable._summary(self, True)
333
334    def summary(self, filename=None):
335        """\
336        Print a summary of the contents of this scantable.
337
338        Parameters:
339
340            filename:    the name of a file to write the output to
341                         Default - no file output
342
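        Example (illustrative; the output file name is arbitrary)::

            scan.summary()                  # page the summary to the logger
            scan.summary('my_summary.txt')  # additionally write it to a file
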
343        """
344        info = Scantable._summary(self, True)
345        if filename is not None:
346            if filename == "":
347                filename = 'scantable_summary.txt'
348            from os.path import expandvars, isdir
349            filename = expandvars(filename)
350            if not isdir(filename):
351                data = open(filename, 'w')
352                data.write(info)
353                data.close()
354            else:
355                msg = "Illegal file name '%s'." % (filename)
356                raise IOError(msg)
357        return page(info)
358
359    def get_spectrum(self, rowno):
360        """Return the spectrum for the current row in the scantable as a list.
361
362        Parameters:
363
364             rowno:   the row number to retrieve the spectrum from
365
366        """
367        return self._getspectrum(rowno)
368
369    def get_mask(self, rowno):
370        """Return the mask for the current row in the scantable as a list.
371
372        Parameters:
373
374             rowno:   the row number to retrieve the mask from
375
376        """
377        return self._getmask(rowno)
378
379    def set_spectrum(self, spec, rowno):
380        """Set the spectrum for the current row in the scantable.
381
382        Parameters:
383
384             spec:   the new spectrum
385
386             rowno:  the row number to set the spectrum for
387
388        """
389        assert(len(spec) == self.nchan())
390        return self._setspectrum(spec, rowno)
391
392    def get_coordinate(self, rowno):
393        """Return the (spectral) coordinate for a given 'rowno'.
394
395        *Note*:
396
397            * This coordinate is only valid until a scantable method modifies
398              the frequency axis.
399            * This coordinate contains the original frequency set-up,
400              NOT the new frame. The conversions, however, are done using the
401              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
402              use scantable.freq_align first. Without it there is no closure,
403              i.e.::
404
405                  c = myscan.get_coordinate(0)
406                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
407
408        Parameters:
409
410             rowno:    the row number for the spectral coordinate
411
412        """
413        return coordinate(Scantable.get_coordinate(self, rowno))
414
415    def get_selection(self):
416        """\
417        Get the selection object currently set on this scantable.
418
419        Example::
420
421            sel = scan.get_selection()
422            sel.set_ifs(0)              # select IF 0
423            scan.set_selection(sel)     # apply modified selection
424
425        """
426        return selector(self._getselection())
427
428    def set_selection(self, selection=None, **kw):
429        """\
430        Select a subset of the data. All following operations on this scantable
431        are only applied to this selection.
432
433        Parameters:
434
435            selection:    a selector object (default unset the selection), or
436                          any combination of "pols", "ifs", "beams", "scans",
437                          "cycles", "name", "query"
438
439        Examples::
440
441            sel = selector()         # create a selection object
442            sel.set_scans([0, 3])    # select SCANNO 0 and 3
443            scan.set_selection(sel)  # set the selection
444            scan.summary()           # will only print summary of scanno 0 and 3
445            scan.set_selection()     # unset the selection
446            # or the equivalent
447            scan.set_selection(scans=[0,3])
448            scan.summary()           # will only print summary of scanno 0 and 3
449            scan.set_selection()     # unset the selection
450
451        """
452        if selection is None:
453            # reset
454            if len(kw) == 0:
455                selection = selector()
456            else:
457                # try keywords
458                for k in kw:
459                    if k not in selector.fields:
460                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
461                selection = selector(**kw)
462        self._setselection(selection)
463
464    def get_row(self, row=0, insitu=None):
465        """\
466        Select a row in the scantable.
467        Return a scantable with single row.
468
469        Parameters:
470
471            row:    row no of integration, default is 0.
472            insitu: if False a new scantable is returned. Otherwise, the
473                    operation is done in-situ. The default is taken from .asaprc
474                    (False)
475
476        """
477        if insitu is None: insitu = rcParams['insitu']
478        if not insitu:
479            workscan = self.copy()
480        else:
481            workscan = self
482        # Select a row
483        sel=selector()
484        sel.set_scans([workscan.getscan(row)])
485        sel.set_cycles([workscan.getcycle(row)])
486        sel.set_beams([workscan.getbeam(row)])
487        sel.set_ifs([workscan.getif(row)])
488        sel.set_polarisations([workscan.getpol(row)])
489        sel.set_name(workscan._getsourcename(row))
490        workscan.set_selection(sel)
491        if not workscan.nrow() == 1:
492            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
493            raise RuntimeError(msg)
494        del sel
495        if insitu:
496            self._assign(workscan)
497        else:
498            return workscan
499
500    @asaplog_post_dec
501    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
502        """\
503        Determine the specified statistic of the current beam/if/pol
504        Takes a 'mask' as an optional parameter to specify which
505        channels should be excluded.
506
507        Parameters:
508
509            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
510                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
511
512            mask:    an optional mask specifying where the statistic
513                     should be determined.
514
515            form:    format string to print statistic values
516
517            row:     row number of spectrum to process.
518                     (default is None: for all rows)
519
520        Example:
521            scan.set_unit('channel')
522            msk = scan.create_mask([100, 200], [500, 600])
523            scan.stats(stat='mean', mask=msk)
524
525        """
526        mask = mask or []
527        if not self._check_ifs():
528            raise ValueError("Cannot apply mask as the IFs have different "
529                             "number of channels. Please use set_selection() "
530                             "to select individual IFs")
531        rtnabc = False
532        if stat.lower().endswith('_abc'): rtnabc = True
533        getchan = False
534        if stat.lower().startswith('min') or stat.lower().startswith('max'):
535            chan = self._math._minmaxchan(self, mask, stat)
536            getchan = True
537            statvals = []
538        if not rtnabc:
539            if row == None:
540                statvals = self._math._stats(self, mask, stat)
541            else:
542                statvals = self._math._statsrow(self, mask, stat, int(row))
543
544        #def cb(i):
545        #    return statvals[i]
546
547        #return self._row_callback(cb, stat)
548
549        label=stat
550        #callback=cb
551        out = ""
552        #outvec = []
553        sep = '-'*50
554
555        if row == None:
556            rows = xrange(self.nrow())
557        elif isinstance(row, int):
558            rows = [ row ]
559
560        for i in rows:
561            refstr = ''
562            statunit= ''
563            if getchan:
564                qx, qy = self.chan2data(rowno=i, chan=chan[i])
565                if rtnabc:
566                    statvals.append(qx['value'])
567                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
568                    statunit= '['+qx['unit']+']'
569                else:
570                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
571
572            tm = self._gettime(i)
573            src = self._getsourcename(i)
574            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
575            out += 'Time[%s]:\n' % (tm)
576            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
577            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
578            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
579            #outvec.append(callback(i))
580            if len(rows) > 1:
581                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
582                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
583            else:
584                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
585                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
586            out +=  sep+"\n"
587
588        import os
589        if os.environ.has_key( 'USER' ):
590            usr = os.environ['USER']
591        else:
592            import commands
593            usr = commands.getoutput( 'whoami' )
594        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
595        f = open(tmpfile,'w')
596        print >> f, sep
597        print >> f, ' %s %s' % (label, statunit)
598        print >> f, sep
599        print >> f, out
600        f.close()
601        f = open(tmpfile,'r')
602        x = f.readlines()
603        f.close()
604        asaplog.push(''.join(x), False)
605
606        return statvals
607
608    def chan2data(self, rowno=0, chan=0):
609        """\
610        Returns channel/frequency/velocity and spectral value
611        at an arbitrary row and channel in the scantable.
612
613        Parameters:
614
615            rowno:   a row number in the scantable. Default is the
616                     first row, i.e. rowno=0
617
618            chan:    a channel in the scantable. Default is the first
619                     channel, i.e. chan=0
620
621        """
622        if isinstance(rowno, int) and isinstance(chan, int):
623            qx = {'unit': self.get_unit(),
624                  'value': self._getabcissa(rowno)[chan]}
625            qy = {'unit': self.get_fluxunit(),
626                  'value': self._getspectrum(rowno)[chan]}
627            return qx, qy
628
629    def stddev(self, mask=None):
630        """\
631        Determine the standard deviation of the current beam/if/pol
632        Takes a 'mask' as an optional parameter to specify which
633        channels should be excluded.
634
635        Parameters:
636
637            mask:    an optional mask specifying where the standard
638                     deviation should be determined.
639
640        Example::
641
642            scan.set_unit('channel')
643            msk = scan.create_mask([100, 200], [500, 600])
644            scan.stddev(mask=msk)
645
646        """
647        return self.stats(stat='stddev', mask=mask);
648
649
650    def get_column_names(self):
651        """\
652        Return a list of column names, which can be used for selection.
653        """
654        return list(Scantable.get_column_names(self))
655
656    def get_tsys(self, row=-1):
657        """\
658        Return the System temperatures.
659
660        Parameters:
661
662            row:    the rowno to get the information for. (default all rows)
663
664        Returns:
665
666            a list of Tsys values for the current selection
667
668        """
669        if row > -1:
670            return self._get_column(self._gettsys, row)
671        return self._row_callback(self._gettsys, "Tsys")
672
673
674    def get_weather(self, row=-1):
675        """\
676        Return the weather information.
677
678        Parameters:
679
680            row:    the rowno to get the information for. (default all rows)
681
682        Returns:
683
684            a dict or a list of dicts of values for the current selection
685
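        Example (illustrative)::

            w = scan.get_weather(row=0)
            print w['temperature'], w['pressure']
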
686        """
687
688        values = self._get_column(self._get_weather, row)
689        if row > -1:
690            return {'temperature': values[0],
691                    'pressure': values[1], 'humidity' : values[2],
692                    'windspeed' : values[3], 'windaz' : values[4]
693                    }
694        else:
695            out = []
696            for r in values:
697
698                out.append({'temperature': r[0],
699                            'pressure': r[1], 'humidity' : r[2],
700                            'windspeed' : r[3], 'windaz' : r[4]
701                    })
702            return out
703
704    def _row_callback(self, callback, label):
705        out = ""
706        outvec = []
707        sep = '-'*50
708        for i in range(self.nrow()):
709            tm = self._gettime(i)
710            src = self._getsourcename(i)
711            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
712            out += 'Time[%s]:\n' % (tm)
713            if self.nbeam(-1) > 1:
714                out +=  ' Beam[%d] ' % (self.getbeam(i))
715            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
716            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
717            outvec.append(callback(i))
718            out += '= %3.3f\n' % (outvec[i])
719            out +=  sep+'\n'
720
721        asaplog.push(sep)
722        asaplog.push(" %s" % (label))
723        asaplog.push(sep)
724        asaplog.push(out)
725        asaplog.post()
726        return outvec
727
728    def _get_column(self, callback, row=-1, *args):
729        """
730        """
731        if row == -1:
732            return [callback(i, *args) for i in range(self.nrow())]
733        else:
734            if  0 <= row < self.nrow():
735                return callback(row, *args)
736
737
738    def get_time(self, row=-1, asdatetime=False, prec=-1):
739        """\
740        Get a list of time stamps for the observations.
741        Return a datetime object or a string (default) for each
742        integration time stamp in the scantable.
743
744        Parameters:
745
746            row:          row no of integration. Default -1 return all rows
747
748            asdatetime:   return values as datetime objects rather than strings
749
750            prec:         number of digits shown. Default -1 calculates the
751                          necessary precision from the minimum integration time.
752                          Note this number equals the precision of MVTime, i.e.,
753                          0<prec<3: date with hh: only, prec<5: with hh:mm:,
754                          prec<7 or 0: with hh:mm:ss, and prec>6: with hh:mm:ss.tt... (prec-6 t's added)
755
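        Example (illustrative)::

            tstr = scan.get_time()                        # strings, automatic precision
            tobj = scan.get_time(row=0, asdatetime=True)  # datetime object for row 0
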
756        """
757        from datetime import datetime
758        if prec < 0:
759            # automagically set necessary precision +1
760            prec = 7 - numpy.floor(numpy.log10(min(self.get_inttime())))
761            prec = max(6, int(prec))
762        else:
763            prec = max(0, prec)
764        if asdatetime:
765            #precision can be 1 millisecond at max
766            prec = min(12, prec)
767
768        times = self._get_column(self._gettime, row, prec)
769        if not asdatetime:
770            return times
771        format = "%Y/%m/%d/%H:%M:%S.%f"
772        if prec < 7:
773            nsub = 1 + (((6-prec)/2) % 3)
774            substr = [".%f","%S","%M"]
775            for i in range(nsub):
776                format = format.replace(substr[i],"")
777        if isinstance(times, list):
778            return [datetime.strptime(i, format) for i in times]
779        else:
780            return datetime.strptime(times, format)
781
782
783    def get_inttime(self, row=-1):
784        """\
785        Get a list of integration times for the observations.
786        Return a time in seconds for each integration in the scantable.
787
788        Parameters:
789
790            row:    row no of integration. Default -1 return all rows.
791
792        """
793        return self._get_column(self._getinttime, row)
794
795
796    def get_sourcename(self, row=-1):
797        """\
798        Get a list of source names for the observations.
799        Return a string for each integration in the scantable.
800        Parameters:
801
802            row:    row no of integration. Default -1 return all rows.
803
804        """
805        return self._get_column(self._getsourcename, row)
806
807    def get_elevation(self, row=-1):
808        """\
809        Get a list of elevations for the observations.
810        Return a float for each integration in the scantable.
811
812        Parameters:
813
814            row:    row no of integration. Default -1 return all rows.
815
816        """
817        return self._get_column(self._getelevation, row)
818
819    def get_azimuth(self, row=-1):
820        """\
821        Get a list of azimuths for the observations.
822        Return a float for each integration in the scantable.
823
824        Parameters:
825            row:    row no of integration. Default -1 return all rows.
826
827        """
828        return self._get_column(self._getazimuth, row)
829
830    def get_parangle(self, row=-1):
831        """\
832        Get a list of parallactic angles for the observations.
833        Return a float for each integration in the scantable.
834
835        Parameters:
836
837            row:    row no of integration. Default -1 return all rows.
838
839        """
840        return self._get_column(self._getparangle, row)
841
842    def get_direction(self, row=-1):
843        """
844        Get a list of Positions on the sky (direction) for the observations.
845        Return a string for each integration in the scantable.
846
847        Parameters:
848
849            row:    row no of integration. Default -1 return all rows
850
851        """
852        return self._get_column(self._getdirection, row)
853
854    def get_directionval(self, row=-1):
855        """\
856        Get a list of Positions on the sky (direction) for the observations.
857        Return a float for each integration in the scantable.
858
859        Parameters:
860
861            row:    row no of integration. Default -1 return all rows
862
863        """
864        return self._get_column(self._getdirectionvec, row)
865
866    @asaplog_post_dec
867    def set_unit(self, unit='channel'):
868        """\
869        Set the unit for all following operations on this scantable
870
871        Parameters:
872
873            unit:    optional unit, default is 'channel'. Use one of '*Hz',
874                     'km/s', 'channel' or equivalent ''
875
876        """
877        varlist = vars()
878        if unit in ['', 'pixel', 'channel']:
879            unit = ''
880        inf = list(self._getcoordinfo())
881        inf[0] = unit
882        self._setcoordinfo(inf)
883        self._add_history("set_unit", varlist)
884
885    @asaplog_post_dec
886    def set_instrument(self, instr):
887        """\
888        Set the instrument for subsequent processing.
889
890        Parameters:
891
892            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
893                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
894
895        """
896        self._setInstrument(instr)
897        self._add_history("set_instrument", vars())
898
899    @asaplog_post_dec
900    def set_feedtype(self, feedtype):
901        """\
902        Overwrite the feed type, which might not be set correctly.
903
904        Parameters:
905
906            feedtype:     'linear' or 'circular'
907
908        """
909        self._setfeedtype(feedtype)
910        self._add_history("set_feedtype", vars())
911
912    @asaplog_post_dec
913    def set_doppler(self, doppler='RADIO'):
914        """\
915        Set the doppler for all following operations on this scantable.
916
917        Parameters:
918
919            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
920
921        """
922        varlist = vars()
923        inf = list(self._getcoordinfo())
924        inf[2] = doppler
925        self._setcoordinfo(inf)
926        self._add_history("set_doppler", vars())
927
928    @asaplog_post_dec
929    def set_freqframe(self, frame=None):
930        """\
931        Set the frame type of the Spectral Axis.
932
933        Parameters:
934
935            frame:   an optional frame type, default 'LSRK'. Valid frames are:
936                     'TOPO', 'LSRD', 'LSRK', 'BARY',
937                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
938
939        Example::
940
941            scan.set_freqframe('BARY')
942
943        """
944        frame = frame or rcParams['scantable.freqframe']
945        varlist = vars()
946        # "REST" is not implemented in casacore
947        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
948        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
949        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
950                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
951
952        if frame in valid:
953            inf = list(self._getcoordinfo())
954            inf[1] = frame
955            self._setcoordinfo(inf)
956            self._add_history("set_freqframe", varlist)
957        else:
958            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
959            raise TypeError(msg)
960
961    @asaplog_post_dec
962    def set_dirframe(self, frame=""):
963        """\
964        Set the frame type of the Direction on the sky.
965
966        Parameters:
967
968            frame:   an optional frame type, default ''. Valid frames are:
969                     'J2000', 'B1950', 'GALACTIC'
970
971        Example::
972
973            scan.set_dirframe('GALACTIC')
974
975        """
976        varlist = vars()
977        Scantable.set_dirframe(self, frame)
978        self._add_history("set_dirframe", varlist)
979
980    def get_unit(self):
981        """\
982        Get the default unit set in this scantable
983
984        Returns:
985
986            A unit string
987
988        """
989        inf = self._getcoordinfo()
990        unit = inf[0]
991        if unit == '': unit = 'channel'
992        return unit
993
994    @asaplog_post_dec
995    def get_abcissa(self, rowno=0):
996        """\
997        Get the abcissa in the current coordinate setup for the currently
998        selected Beam/IF/Pol
999
1000        Parameters:
1001
1002            rowno:    an optional row number in the scantable. Default is the
1003                      first row, i.e. rowno=0
1004
1005        Returns:
1006
1007            The abcissa values and the format string (as a dictionary)
1008
1009        """
1010        abc = self._getabcissa(rowno)
1011        lbl = self._getabcissalabel(rowno)
1012        return abc, lbl
1013
1014    @asaplog_post_dec
1015    def flag(self, mask=None, unflag=False):
1016        """\
1017        Flag the selected data using an optional channel mask.
1018
1019        Parameters:
1020
1021            mask:   an optional channel mask, created with create_mask. Default
1022                    (no mask) is all channels.
1023
1024            unflag:    if True, unflag the data
1025
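        Example (illustrative)::

            # flag channels 0-99 of the current selection
            msk = scan.create_mask([0, 99])
            scan.flag(mask=msk)
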
1026        """
1027        varlist = vars()
1028        mask = mask or []
1029        self._flag(mask, unflag)
1030        self._add_history("flag", varlist)
1031
1032    @asaplog_post_dec
1033    def flag_row(self, rows=[], unflag=False):
1034        """\
1035        Flag the selected data in row-based manner.
1036
1037        Parameters:
1038
1039            rows:   list of row numbers to be flagged. Default is no row
1040                    (must be explicitly specified to execute row-based flagging).
1041
1042            unflag: if True, unflag the data.
1043
1044        """
1045        varlist = vars()
1046        self._flag_row(rows, unflag)
1047        self._add_history("flag_row", varlist)
1048
1049    @asaplog_post_dec
1050    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1051        """\
1052        Flag the selected data outside a specified range (channel-based).
1053
1054        Parameters:
1055
1056            uthres:      upper threshold.
1057
1058            dthres:      lower threshold
1059
1060            clipoutside: True for flagging data outside the range [dthres:uthres].
1061                         False for flagging data inside the range.
1062
1063            unflag:      if True, unflag the data.
1064
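        Example (illustrative; the thresholds are arbitrary)::

            # flag everything outside the range [-1.0, 5.0]
            scan.clip(uthres=5.0, dthres=-1.0)
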
1065        """
1066        varlist = vars()
1067        self._clip(uthres, dthres, clipoutside, unflag)
1068        self._add_history("clip", varlist)
1069
1070    @asaplog_post_dec
1071    def lag_flag(self, start, end, unit="MHz", insitu=None):
1072        """\
1073        Flag the data in 'lag' space by providing a frequency to remove.
1074        Flagged data in the scantable gets interpolated over the region.
1075        No taper is applied.
1076
1077        Parameters:
1078
1079            start:    the start frequency (really a period within the
1080                      bandwidth)  or period to remove
1081
1082            end:      the end frequency or period to remove
1083
1084            unit:     the frequency unit (default "MHz") or "" for
1085                      explicit lag channels
1086
1087        *Notes*:
1088
1089            It is recommended to flag edges of the band or strong
1090            signals beforehand.
1091
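        Example (illustrative; the values are arbitrary)::

            # interpolate over a ripple corresponding to 5.5-6.5 MHz
            scan.lag_flag(5.5, 6.5, unit='MHz')
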
1092        """
1093        if insitu is None: insitu = rcParams['insitu']
1094        self._math._setinsitu(insitu)
1095        varlist = vars()
1096        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1097        if not (unit == "" or base.has_key(unit)):
1098            raise ValueError("%s is not a valid unit." % unit)
1099        if unit == "":
1100            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1101        else:
1102            s = scantable(self._math._lag_flag(self, start*base[unit],
1103                                               end*base[unit], "frequency"))
1104        s._add_history("lag_flag", varlist)
1105        if insitu:
1106            self._assign(s)
1107        else:
1108            return s
1109
1110    @asaplog_post_dec
1111    def create_mask(self, *args, **kwargs):
1112        """\
1113        Compute and return a mask based on [min, max] windows.
1114        The specified windows are to be INCLUDED, when the mask is
1115        applied.
1116
1117        Parameters:
1118
1119            [min, max], [min2, max2], ...
1120                Pairs of start/end points (inclusive) specifying the regions
1121                to be masked
1122
1123            invert:     optional argument. If specified as True,
1124                        return an inverted mask, i.e. the regions
1125                        specified are EXCLUDED
1126
1127            row:        create the mask using the specified row for
1128                        unit conversions, default is row=0.
1129                        Only necessary if frequency varies over rows.
1130
1131        Examples::
1132
1133            scan.set_unit('channel')
1134            # a)
1135            msk = scan.create_mask([400, 500], [800, 900])
1136            # masks everything outside 400 and 500
1137            # and 800 and 900 in the unit 'channel'
1138
1139            # b)
1140            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1141            # masks the regions between 400 and 500
1142            # and 800 and 900 in the unit 'channel'
1143
1144            # c)
1145            #mask only channel 400
1146            msk =  scan.create_mask([400])
1147
1148        """
1149        row = kwargs.get("row", 0)
1150        data = self._getabcissa(row)
1151        u = self._getcoordinfo()[0]
1152        if u == "":
1153            u = "channel"
1154        msg = "The current mask window unit is %s" % u
1155        i = self._check_ifs()
1156        if not i:
1157            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1158        asaplog.push(msg)
1159        n = self.nchan()
1160        msk = _n_bools(n, False)
1161        # test if args is a 'list' or a 'normal *args - UGLY!!!
1162
1163        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1164             and args or args[0]
1165        for window in ws:
1166            if len(window) == 1:
1167                window = [window[0], window[0]]
1168            if len(window) == 0 or  len(window) > 2:
1169                raise ValueError("A window needs to be defined as [start(, end)]")
1170            if window[0] > window[1]:
1171                tmp = window[0]
1172                window[0] = window[1]
1173                window[1] = tmp
1174            for i in range(n):
1175                if data[i] >= window[0] and data[i] <= window[1]:
1176                    msk[i] = True
1177        if kwargs.has_key('invert'):
1178            if kwargs.get('invert'):
1179                msk = mask_not(msk)
1180        return msk
1181
1182    def get_masklist(self, mask=None, row=0, silent=False):
1183        """\
1184        Compute and return a list of mask windows, [min, max].
1185
1186        Parameters:
1187
1188            mask:       channel mask, created with create_mask.
1189
1190            row:        calculate the masklist using the specified row
1191                        for unit conversions, default is row=0.
1192                        Only necessary if frequency varies over rows.
1193
1194        Returns:
1195
1196            [min, max], [min2, max2], ...
1197                Pairs of start/end points (inclusive) specifying
1198                the masked regions
1199
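        Example (illustrative)::

            msk = scan.create_mask([400, 500], [800, 900])
            windows = scan.get_masklist(msk)
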
1200        """
1201        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1202            raise TypeError("The mask should be list or tuple.")
1203        if len(mask) < 2:
1204            raise TypeError("The mask should contain more than 1 element.")
1205        if self.nchan() != len(mask):
1206            msg = "Number of channels in scantable != number of mask elements"
1207            raise TypeError(msg)
1208        data = self._getabcissa(row)
1209        u = self._getcoordinfo()[0]
1210        if u == "":
1211            u = "channel"
1212        msg = "The current mask window unit is %s" % u
1213        i = self._check_ifs()
1214        if not i:
1215            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1216        if not silent:
1217            asaplog.push(msg)
1218        masklist=[]
1219        ist, ien = None, None
1220        ist, ien=self.get_mask_indices(mask)
1221        if ist is not None and ien is not None:
1222            for i in xrange(len(ist)):
1223                range=[data[ist[i]],data[ien[i]]]
1224                range.sort()
1225                masklist.append([range[0],range[1]])
1226        return masklist
1227
1228    def get_mask_indices(self, mask=None):
1229        """\
1230        Compute and return lists of mask start indices and mask end indices.
1231
1232        Parameters:
1233
1234            mask:       channel mask, created with create_mask.
1235
1236        Returns:
1237
1238            List of mask start indices and that of mask end indices,
1239            i.e., [istart1,istart2,....], [iend1,iend2,....].
1240
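        Example (illustrative)::

            msk = scan.create_mask([400, 500])
            istart, iend = scan.get_mask_indices(msk)
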
1241        """
1242        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1243            raise TypeError("The mask should be list or tuple.")
1244        if len(mask) < 2:
1245            raise TypeError("The mask should contain more than 1 element.")
1246        istart=[]
1247        iend=[]
1248        if mask[0]: istart.append(0)
1249        for i in range(len(mask)-1):
1250            if not mask[i] and mask[i+1]:
1251                istart.append(i+1)
1252            elif mask[i] and not mask[i+1]:
1253                iend.append(i)
1254        if mask[len(mask)-1]: iend.append(len(mask)-1)
1255        if len(istart) != len(iend):
1256            raise RuntimeError("Numbers of mask start != mask end.")
1257        for i in range(len(istart)):
1258            if istart[i] > iend[i]:
1259                raise RuntimeError("Mask start index > mask end index")
1260                break
1261        return istart,iend
1262
1263#    def get_restfreqs(self):
1264#        """
1265#        Get the restfrequency(s) stored in this scantable.
1266#        The return value(s) are always of unit 'Hz'
1267#        Parameters:
1268#            none
1269#        Returns:
1270#            a list of doubles
1271#        """
1272#        return list(self._getrestfreqs())
1273
1274    def get_restfreqs(self, ids=None):
1275        """\
1276        Get the restfrequency(s) stored in this scantable.
1277        The return value(s) are always of unit 'Hz'
1278
1279        Parameters:
1280
1281            ids: (optional) a list of MOLECULE_ID for that restfrequency(s) to
1282                 be retrieved
1283
1284        Returns:
1285
1286            dictionary containing ids and a list of doubles for each id
1287
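        Example (illustrative)::

            rfs = scan.get_restfreqs()       # {molecule_id: [values in Hz]}
            rf0 = scan.get_restfreqs(ids=0)  # list of values for MOLECULE_ID 0
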
1288        """
1289        if ids is None:
1290            rfreqs={}
1291            idlist = self.getmolnos()
1292            for i in idlist:
1293                rfreqs[i]=list(self._getrestfreqs(i))
1294            return rfreqs
1295        else:
1296            if type(ids)==list or type(ids)==tuple:
1297                rfreqs={}
1298                for i in ids:
1299                    rfreqs[i]=list(self._getrestfreqs(i))
1300                return rfreqs
1301            else:
1302                return list(self._getrestfreqs(ids))
1303            #return list(self._getrestfreqs(ids))
1304
1305    def set_restfreqs(self, freqs=None, unit='Hz'):
1306        """\
1307        Set or replace the restfrequency specified.
1308        If the 'freqs' argument holds a scalar,
1309        that rest frequency will be applied to all the selected
1310        data.  If the 'freqs' argument holds
1311        a vector, it MUST be of equal or smaller length than
1312        the number of IFs (and the available restfrequencies will be
1313        replaced by this vector).  In this case, *all* data have
1314        the restfrequency set per IF according
1315        to the corresponding value you give in the 'freqs' vector.
1316        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1317        IF 1 gets restfreq 2e9.
1318
1319        You can also specify the frequencies via a linecatalog.
1320
1321        Parameters:
1322
1323            freqs:   list of rest frequency values or string identifiers
1324
1325            unit:    unit for rest frequency (default 'Hz')
1326
1327
1328        Example::
1329
1330            # set the given restfrequency for the all currently selected IFs
1331            scan.set_restfreqs(freqs=1.4e9)
1332            # set restfrequencies for the n IFs  (n > 1) in the order of the
1333            # list, i.e.
1334            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1335            # len(list_of_restfreqs) == nIF
1336            # for nIF == 1 the following will set multiple restfrequency for
1337            # that IF
1338            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1339            # set multiple restfrequencies per IF. as a list of lists where
1340            # the outer list has nIF elements, the inner is arbitrary
1341            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1342
1343       *Note*:
1344
1345            To do more sophisticated rest frequency setting, e.g. on a
1346            source and IF basis, use scantable.set_selection() before using
1347            this function::
1348
1349                # provided your scantable is called scan
1350                selection = selector()
1351                selection.set_name("ORION*")
1352                selection.set_ifs([1])
1353                scan.set_selection(selection)
1354                scan.set_restfreqs(freqs=86.6e9)
1355
1356        """
1357        varlist = vars()
1358        from asap import linecatalog
1359        # simple  value
1360        if isinstance(freqs, int) or isinstance(freqs, float):
1361            self._setrestfreqs([freqs], [""], unit)
1362        # list of values
1363        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1364            # list values are scalars
1365            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1366                if len(freqs) == 1:
1367                    self._setrestfreqs(freqs, [""], unit)
1368                else:
1369                    # allow the 'old' mode of setting multiple IFs
1370                    sel = selector()
1371                    savesel = self._getselection()
1372                    iflist = self.getifnos()
1373                    if len(freqs)>len(iflist):
1374                        raise ValueError("number of elements in list of list "
1375                                         "exceeds the current IF selections")
1376                    iflist = self.getifnos()
1377                    for i, fval in enumerate(freqs):
1378                        sel.set_ifs(iflist[i])
1379                        self._setselection(sel)
1380                        self._setrestfreqs([fval], [""], unit)
1381                    self._setselection(savesel)
1382
1383            # list values are dict, {'value'=, 'name'=)
1384            elif isinstance(freqs[-1], dict):
1385                values = []
1386                names = []
1387                for d in freqs:
1388                    values.append(d["value"])
1389                    names.append(d["name"])
1390                self._setrestfreqs(values, names, unit)
1391            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1392                sel = selector()
1393                savesel = self._getselection()
1394                iflist = self.getifnos()
1395                if len(freqs)>len(iflist):
1396                    raise ValueError("number of elements in list of list exceeds"
1397                                     " the current IF selections")
1398                for i, fval in enumerate(freqs):
1399                    sel.set_ifs(iflist[i])
1400                    self._setselection(sel)
1401                    self._setrestfreqs(fval, [""], unit)
1402                self._setselection(savesel)
1403        # freqs are to be taken from a linecatalog
1404        elif isinstance(freqs, linecatalog):
1405            sel = selector()
1406            savesel = self._getselection()
            iflist = self.getifnos()   # the IF numbers used when setting the selection below
1407            for i in xrange(freqs.nrow()):
1408                sel.set_ifs(iflist[i])
1409                self._setselection(sel)
1410                self._setrestfreqs([freqs.get_frequency(i)],
1411                                   [freqs.get_name(i)], "MHz")
1412                # ensure that we are not iterating past nIF
1413                if i == self.nif()-1: break
1414            self._setselection(savesel)
1415        else:
1416            return
1417        self._add_history("set_restfreqs", varlist)
1418
1419    def shift_refpix(self, delta):
1420        """\
1421        Shift the reference pixel of the Spectral Coordinate by an
1422        integer amount.
1423
1424        Parameters:
1425
1426            delta:   the amount to shift by
1427
1428        *Note*:
1429
1430            Be careful using this with broadband data.
1431
1432        """
1433        Scantable.shift_refpix(self, delta)
1434
1435    @asaplog_post_dec
1436    def history(self, filename=None):
1437        """\
1438        Print the history. Optionally to a file.
1439
1440        Parameters:
1441
1442            filename:    The name of the file to save the history to.
1443
1444        """
1445        hist = list(self._gethistory())
1446        out = "-"*80
1447        for h in hist:
1448            if h.startswith("---"):
1449                out = "\n".join([out, h])
1450            else:
1451                items = h.split("##")
1452                date = items[0]
1453                func = items[1]
1454                items = items[2:]
1455                out += "\n"+date+"\n"
1456                out += "Function: %s\n  Parameters:" % (func)
1457                for i in items:
1458                    if i == '':
1459                        continue
1460                    s = i.split("=")
1461                    out += "\n   %s = %s" % (s[0], s[1])
1462                out = "\n".join([out, "-"*80])
1463        if filename is not None:
1464            if filename == "":
1465                filename = 'scantable_history.txt'
1466            import os
1467            filename = os.path.expandvars(os.path.expanduser(filename))
1468            if not os.path.isdir(filename):
1469                data = open(filename, 'w')
1470                data.write(out)
1471                data.close()
1472            else:
1473                msg = "Illegal file name '%s'." % (filename)
1474                raise IOError(msg)
1475        return page(out)
1476    #
1477    # Maths business
1478    #
1479    @asaplog_post_dec
1480    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1481        """\
1482        Return the (time) weighted average of a scan.
1483
1484        *Note*:
1485
1486            in channels only - align if necessary
1487
1488        Parameters:
1489
1490            mask:     an optional mask (only used for 'var' and 'tsys'
1491                      weighting)
1492
1493            scanav:   True averages each scan separately
1494                      False (default) averages all scans together,
1495
1496            weight:   Weighting scheme.
1497                      'none'     (mean no weight)
1498                      'var'      (1/var(spec) weighted)
1499                      'tsys'     (1/Tsys**2 weighted)
1500                      'tint'     (integration time weighted)
1501                      'tintsys'  (Tint/Tsys**2)
1502                      'median'   ( median averaging)
1503                      The default is 'tint'
1504
1505            align:    align the spectra in velocity before averaging. It takes
1506                      the time of the first spectrum as reference time.
1507
1508        Example::
1509
1510            # time average the scantable without using a mask
1511            newscan = scan.average_time()
1512
1513        """
1514        varlist = vars()
1515        weight = weight or 'TINT'
1516        mask = mask or ()
1517        scanav = (scanav and 'SCAN') or 'NONE'
1518        scan = (self, )
1519
1520        if align:
1521            scan = (self.freq_align(insitu=False), )
1522        s = None
1523        if weight.upper() == 'MEDIAN':
1524            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1525                                                     scanav))
1526        else:
1527            s = scantable(self._math._average(scan, mask, weight.upper(),
1528                          scanav))
1529        s._add_history("average_time", varlist)
1530        return s
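    # A minimal sketch of typical average_time() calls; 'scan' is a hypothetical
    # scantable loaded elsewhere.
    #
    #   avg = scan.average_time()                            # tint weighting, all scans
    #   avg = scan.average_time(scanav=True)                 # average each scan separately
    #   avg = scan.average_time(weight='tsys', align=True)   # align in velocity first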
1531
1532    @asaplog_post_dec
1533    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1534        """\
1535        Return a scan where all spectra are converted to either
1536        Jansky or Kelvin depending upon the flux units of the scan table.
1537        By default the function tries to look the values up internally.
1538        If it can't find them (or if you want to override them), you must
1539        specify EITHER jyperk OR eta (and d, which it will also try to look
1540        up if you don't set it). jyperk takes precedence if you set both.
1541
1542        Parameters:
1543
1544            jyperk:      the Jy / K conversion factor
1545
1546            eta:         the aperture efficiency
1547
1548            d:           the geometric diameter (metres)
1549
1550            insitu:      if False a new scantable is returned.
1551                         Otherwise, the scaling is done in-situ
1552                         The default is taken from .asaprc (False)
1553
1554        """
1555        if insitu is None: insitu = rcParams['insitu']
1556        self._math._setinsitu(insitu)
1557        varlist = vars()
1558        jyperk = jyperk or -1.0
1559        d = d or -1.0
1560        eta = eta or -1.0
1561        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1562        s._add_history("convert_flux", varlist)
1563        if insitu: self._assign(s)
1564        else: return s
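    # Hedged convert_flux() sketch: give the Jy/K factor directly, or give the
    # aperture efficiency (and optionally the dish diameter). The numbers and
    # 'scan' are illustrative only.
    #
    #   jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
    #   jyscan = scan.convert_flux(eta=0.58, d=22.0, insitu=False)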
1565
1566    @asaplog_post_dec
1567    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1568        """\
1569        Return a scan after applying a gain-elevation correction.
1570        The correction can be made via either a polynomial or a
1571        table-based interpolation (and extrapolation if necessary).
1572        You specify polynomial coefficients, an ascii table or neither.
1573        If you specify neither, then a polynomial correction will be made
1574        with built in coefficients known for certain telescopes (an error
1575        will occur if the instrument is not known).
1576        The data and Tsys are *divided* by the scaling factors.
1577
1578        Parameters:
1579
1580            poly:        Polynomial coefficients (default None) to compute a
1581                         gain-elevation correction as a function of
1582                         elevation (in degrees).
1583
1584            filename:    The name of an ascii file holding correction factors.
1585                         The first row of the ascii file must give the column
1586                         names and these MUST include columns
1587                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1588                         by this) somewhere.
1589                         The second row must give the data type of the
1590                         column. Use 'R' for Real and 'I' for Integer.
1591                         An example file would be
1592                         (actual factors are arbitrary) :
1593
1594                         TIME ELEVATION FACTOR
1595                         R R R
1596                         0.1 0 0.8
1597                         0.2 20 0.85
1598                         0.3 40 0.9
1599                         0.4 60 0.85
1600                         0.5 80 0.8
1601                         0.6 90 0.75
1602
1603            method:      Interpolation method when correcting from a table.
1604                         Values are  "nearest", "linear" (default), "cubic"
1605                         and "spline"
1606
1607            insitu:      if False a new scantable is returned.
1608                         Otherwise, the scaling is done in-situ
1609                         The default is taken from .asaprc (False)
1610
1611        """
1612
1613        if insitu is None: insitu = rcParams['insitu']
1614        self._math._setinsitu(insitu)
1615        varlist = vars()
1616        poly = poly or ()
1617        from os.path import expandvars
1618        filename = expandvars(filename)
1619        s = scantable(self._math._gainel(self, poly, filename, method))
1620        s._add_history("gain_el", varlist)
1621        if insitu:
1622            self._assign(s)
1623        else:
1624            return s
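    # Sketch of the two ways to drive gain_el(); the polynomial coefficients and
    # the ascii file name are made up for illustration.
    #
    #   g = scan.gain_el(poly=[0.5, 0.0125, -0.0001], insitu=False)
    #   g = scan.gain_el(filename='gainel.txt', method='linear', insitu=False)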
1625
1626    @asaplog_post_dec
1627    def freq_align(self, reftime=None, method='cubic', insitu=None):
1628        """\
1629        Return a scan where all rows have been aligned in frequency/velocity.
1630        The alignment frequency frame (e.g. LSRK) is that set by function
1631        set_freqframe.
1632
1633        Parameters:
1634
1635            reftime:     reference time to align at. By default, the time of
1636                         the first row of data is used.
1637
1638            method:      Interpolation method for regridding the spectra.
1639                         Choose from "nearest", "linear", "cubic" (default)
1640                         and "spline"
1641
1642            insitu:      if False a new scantable is returned.
1643                         Otherwise, the scaling is done in-situ
1644                         The default is taken from .asaprc (False)
1645
1646        """
1647        if insitu is None: insitu = rcParams["insitu"]
1648        self._math._setinsitu(insitu)
1649        varlist = vars()
1650        reftime = reftime or ""
1651        s = scantable(self._math._freq_align(self, reftime, method))
1652        s._add_history("freq_align", varlist)
1653        if insitu: self._assign(s)
1654        else: return s
1655
1656    @asaplog_post_dec
1657    def opacity(self, tau=None, insitu=None):
1658        """\
1659        Apply an opacity correction. The data
1660        and Tsys are multiplied by the correction factor.
1661
1662        Parameters:
1663
1664            tau:         (list of) opacity from which the correction factor is
1665                         exp(tau*ZD)
1666                         where ZD is the zenith-distance.
1667                         If a list is provided, it has to be of length nIF,
1668                         nIF*nPol or 1 and in order of IF/POL, e.g.
1669                         [opif0pol0, opif0pol1, opif1pol0 ...]
1670                         If tau is `None`, the opacities are determined from a
1671                         model.
1672
1673            insitu:      if False a new scantable is returned.
1674                         Otherwise, the scaling is done in-situ
1675                         The default is taken from .asaprc (False)
1676
1677        """
1678        if insitu is None: insitu = rcParams['insitu']
1679        self._math._setinsitu(insitu)
1680        varlist = vars()
1681        if not hasattr(tau, "__len__"):
1682            tau = [tau]
1683        s = scantable(self._math._opacity(self, tau))
1684        s._add_history("opacity", varlist)
1685        if insitu: self._assign(s)
1686        else: return s
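    # Opacity-correction sketch: a single opacity, or one value per IF/POL in
    # the order described above; 'scan' and the tau values are illustrative.
    #
    #   o = scan.opacity(tau=0.083, insitu=False)
    #   o = scan.opacity(tau=[0.083, 0.083, 0.095, 0.095], insitu=False)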
1687
1688    @asaplog_post_dec
1689    def bin(self, width=5, insitu=None):
1690        """\
1691        Return a scan where all spectra have been binned up.
1692
1693        Parameters:
1694
1695            width:       The bin width (default=5) in pixels
1696
1697            insitu:      if False a new scantable is returned.
1698                         Otherwise, the scaling is done in-situ
1699                         The default is taken from .asaprc (False)
1700
1701        """
1702        if insitu is None: insitu = rcParams['insitu']
1703        self._math._setinsitu(insitu)
1704        varlist = vars()
1705        s = scantable(self._math._bin(self, width))
1706        s._add_history("bin", varlist)
1707        if insitu:
1708            self._assign(s)
1709        else:
1710            return s
1711
1712    @asaplog_post_dec
1713    def resample(self, width=5, method='cubic', insitu=None):
1714        """\
1715        Return a scan where all spectra have been resampled onto a new channel grid.
1716
1717        Parameters:
1718
1719            width:       The bin width (default=5) in pixels
1720
1721            method:      Interpolation method used for the resampling.
1722                         Values are  "nearest", "linear", "cubic" (default)
1723                         and "spline"
1724
1725            insitu:      if False a new scantable is returned.
1726                         Otherwise, the scaling is done in-situ
1727                         The default is taken from .asaprc (False)
1728
1729        """
1730        if insitu is None: insitu = rcParams['insitu']
1731        self._math._setinsitu(insitu)
1732        varlist = vars()
1733        s = scantable(self._math._resample(self, method, width))
1734        s._add_history("resample", varlist)
1735        if insitu: self._assign(s)
1736        else: return s
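    # Quick sketch contrasting bin() and resample() with arbitrary widths:
    # bin() combines adjacent channels in blocks of 'width', resample() regrids
    # the spectra using the chosen interpolation method.
    #
    #   b = scan.bin(width=4, insitu=False)
    #   r = scan.resample(width=4, method='spline', insitu=False)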
1737
1738    @asaplog_post_dec
1739    def average_pol(self, mask=None, weight='none'):
1740        """\
1741        Average the Polarisations together.
1742
1743        Parameters:
1744
1745            mask:        An optional mask defining the region, where the
1746                         averaging will be applied. The output will have all
1747                         specified points masked.
1748
1749            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1750                         weighted), or 'tsys' (1/Tsys**2 weighted)
1751
1752        """
1753        varlist = vars()
1754        mask = mask or ()
1755        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1756        s._add_history("average_pol", varlist)
1757        return s
1758
1759    @asaplog_post_dec
1760    def average_beam(self, mask=None, weight='none'):
1761        """\
1762        Average the Beams together.
1763
1764        Parameters:
1765            mask:        An optional mask defining the region, where the
1766                         averaging will be applied. The output will have all
1767                         specified points masked.
1768
1769            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1770                         weighted), or 'tsys' (1/Tsys**2 weighted)
1771
1772        """
1773        varlist = vars()
1774        mask = mask or ()
1775        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1776        s._add_history("average_beam", varlist)
1777        return s
1778
1779    def parallactify(self, pflag):
1780        """\
1781        Set a flag to indicate whether this data should be treated as having
1782        been 'parallactified' (total phase == 0.0)
1783
1784        Parameters:
1785
1786            pflag:  Bool indicating whether to turn this on (True) or
1787                    off (False)
1788
1789        """
1790        varlist = vars()
1791        self._parallactify(pflag)
1792        self._add_history("parallactify", varlist)
1793
1794    @asaplog_post_dec
1795    def convert_pol(self, poltype=None):
1796        """\
1797        Convert the data to a different polarisation type.
1798        Note that you will need cross-polarisation terms for most conversions.
1799
1800        Parameters:
1801
1802            poltype:    The new polarisation type. Valid types are:
1803                        "linear", "circular", "stokes" and "linpol"
1804
1805        """
1806        varlist = vars()
1807        s = scantable(self._math._convertpol(self, poltype))
1808        s._add_history("convert_pol", varlist)
1809        return s
1810
1811    @asaplog_post_dec
1812    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
1813        """\
1814        Smooth the spectrum by the specified kernel (conserving flux).
1815
1816        Parameters:
1817
1818            kernel:     The type of smoothing kernel. Select from
1819                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1820                        or 'poly'
1821
1822            width:      The width of the kernel in pixels. For 'hanning' this
1823                        is ignored, otherwise it defaults to 5 pixels.
1824                        For 'gaussian' it is the Full Width Half
1825                        Maximum. For 'boxcar' it is the full width.
1826                        For 'rmedian' and 'poly' it is the half width.
1827
1828            order:      Optional parameter for the 'poly' kernel (default is 2),
1829                        to specify the order of the polynomial. Ignored by all
1830                        other kernels.
1831
1832            plot:       plot the original and the smoothed spectra.
1833                        In this mode each individual fit has to be approved
1834                        by typing 'y' or 'n'
1835
1836            insitu:     if False a new scantable is returned.
1837                        Otherwise, the scaling is done in-situ
1838                        The default is taken from .asaprc (False)
1839
1840        """
1841        if insitu is None: insitu = rcParams['insitu']
1842        self._math._setinsitu(insitu)
1843        varlist = vars()
1844
1845        if plot: orgscan = self.copy()
1846
1847        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1848        s._add_history("smooth", varlist)
1849
1850        if plot:
1851            if rcParams['plotter.gui']:
1852                from asap.asaplotgui import asaplotgui as asaplot
1853            else:
1854                from asap.asaplot import asaplot
1855            self._p=asaplot()
1856            self._p.set_panels()
1857            ylab=s._get_ordinate_label()
1858            #self._p.palette(0,["#777777","red"])
1859            for r in xrange(s.nrow()):
1860                xsm=s._getabcissa(r)
1861                ysm=s._getspectrum(r)
1862                xorg=orgscan._getabcissa(r)
1863                yorg=orgscan._getspectrum(r)
1864                self._p.clear()
1865                self._p.hold()
1866                self._p.set_axes('ylabel',ylab)
1867                self._p.set_axes('xlabel',s._getabcissalabel(r))
1868                self._p.set_axes('title',s._getsourcename(r))
1869                self._p.set_line(label='Original',color="#777777")
1870                self._p.plot(xorg,yorg)
1871                self._p.set_line(label='Smoothed',color="red")
1872                self._p.plot(xsm,ysm)
1873                ### Ugly part for legend
1874                for i in [0,1]:
1875                    self._p.subplots[0]['lines'].append([self._p.subplots[0]['axes'].lines[i]])
1876                self._p.release()
1877                ### Ugly part for legend
1878                self._p.subplots[0]['lines']=[]
1879                res = raw_input("Accept smoothing ([y]/n): ")
1880                if res.upper() == 'N':
1881                    s._setspectrum(yorg, r)
1882            self._p.unmap()
1883            self._p = None
1884            del orgscan
1885
1886        if insitu: self._assign(s)
1887        else: return s
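    # Smoothing sketch covering the kernel/width combinations described above;
    # the widths are arbitrary example values.
    #
    #   sm = scan.smooth(kernel='hanning', insitu=False)
    #   sm = scan.smooth(kernel='gaussian', width=7.0, insitu=False)  # FWHM in pixels
    #   sm = scan.smooth(kernel='boxcar', width=5.0, insitu=False)    # full width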
1888
1889    @asaplog_post_dec
1890    def old_poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None, rows=None):
1891        """\
1892        Return a scan which has been baselined (all rows) by a polynomial.
1893       
1894        Parameters:
1895
1896            mask:       an optional mask
1897
1898            order:      the order of the polynomial (default is 0)
1899
1900            plot:       plot the fit and the residual. In this mode each
1901                        individual fit has to be approved, by typing 'y'
1902                        or 'n'
1903
1904            uselin:     use linear polynomial fit
1905
1906            insitu:     if False a new scantable is returned.
1907                        Otherwise, the scaling is done in-situ
1908                        The default is taken from .asaprc (False)
1909
1910            rows:       row numbers of spectra to be processed.
1911                        (default is None: for all rows)
1912       
1913        Example:
1914            # return a scan baselined by a third order polynomial,
1915            # not using a mask
1916            bscan = scan.old_poly_baseline(order=3)
1917
1918        """
1919        if insitu is None: insitu = rcParams['insitu']
1920        if not insitu:
1921            workscan = self.copy()
1922        else:
1923            workscan = self
1924        varlist = vars()
1925        if mask is None:
1926            mask = [True for i in xrange(self.nchan())]
1927
1928        try:
1929            f = fitter()
1930            if uselin:
1931                f.set_function(lpoly=order)
1932            else:
1933                f.set_function(poly=order)
1934
1935            if rows is None:
1936                rows = xrange(workscan.nrow())
1937            elif isinstance(rows, int):
1938                rows = [ rows ]
1939           
1940            if len(rows) > 0:
1941                self.blpars = []
1942                self.masklists = []
1943                self.actualmask = []
1944           
1945            for r in rows:
1946                f.x = workscan._getabcissa(r)
1947                f.y = workscan._getspectrum(r)
1948                f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
1949                f.data = None
1950                f.fit()
1951                if plot:
1952                    f.plot(residual=True)
1953                    x = raw_input("Accept fit ( [y]/n ): ")
1954                    if x.upper() == 'N':
1955                        self.blpars.append(None)
1956                        self.masklists.append(None)
1957                        self.actualmask.append(None)
1958                        continue
1959                workscan._setspectrum(f.fitter.getresidual(), r)
1960                self.blpars.append(f.get_parameters())
1961                self.masklists.append(workscan.get_masklist(f.mask, row=r, silent=True))
1962                self.actualmask.append(f.mask)
1963
1964            if plot:
1965                f._p.unmap()
1966                f._p = None
1967            workscan._add_history("poly_baseline", varlist)
1968            if insitu:
1969                self._assign(workscan)
1970            else:
1971                return workscan
1972        except RuntimeError:
1973            msg = "The fit failed, possibly because it didn't converge."
1974            raise RuntimeError(msg)
1975
1976    @asaplog_post_dec
1977    def poly_baseline(self, mask=None, order=0, plot=False, batch=False, insitu=None, rows=None):
1978        """\
1979        Return a scan which has been baselined (all rows) by a polynomial.
1980        Parameters:
1981            mask:       an optional mask
1982            order:      the order of the polynomial (default is 0)
1983            plot:       plot the fit and the residual. In this mode each
1984                        individual fit has to be approved, by typing 'y'
1985                        or 'n'. Ignored if batch = True.
1986            batch:      if True a faster algorithm is used and logs
1987                        including the fit results are not output
1988                        (default is False)
1989            insitu:     if False a new scantable is returned.
1990                        Otherwise, the scaling is done in-situ
1991                        The default is taken from .asaprc (False)
1992            rows:       row numbers of spectra to be baselined.
1993                        (default is None: for all rows)
1994        Example:
1995            # return a scan baselined by a third order polynomial,
1996            # not using a mask
1997            bscan = scan.poly_baseline(order=3)
1998        """
1999       
2000        varlist = vars()
2001       
2002        if insitu is None: insitu = rcParams["insitu"]
2003        if insitu:
2004            workscan = self
2005        else:
2006            workscan = self.copy()
2007
2008        nchan = workscan.nchan()
2009       
2010        if mask is None:
2011            mask = [True for i in xrange(nchan)]
2012
2013        try:
2014            if rows is None:
2015                rows = xrange(workscan.nrow())
2016            elif isinstance(rows, int):
2017                rows = [ rows ]
2018           
2019            if len(rows) > 0:
2020                workscan.blpars = []
2021                workscan.masklists = []
2022                workscan.actualmask = []
2023
2024            if batch:
2025                workscan._poly_baseline_batch(mask, order)
2026            elif plot:
2027                f = fitter()
2028                f.set_function(lpoly=order)
2029                for r in rows:
2030                    f.x = workscan._getabcissa(r)
2031                    f.y = workscan._getspectrum(r)
2032                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2033                    f.data = None
2034                    f.fit()
2035                   
2036                    f.plot(residual=True)
2037                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2038                    if accept_fit.upper() == "N":
2039                        workscan.blpars.append(None)
2040                        workscan.masklists.append(None)
2041                        workscan.actualmask.append(None)
2042                        continue
2043                    workscan._setspectrum(f.fitter.getresidual(), r)
2044                    workscan.blpars.append(f.get_parameters())
2045                    workscan.masklists.append(workscan.get_masklist(f.mask, row=r))
2046                    workscan.actualmask.append(f.mask)
2047                   
2048                f._p.unmap()
2049                f._p = None
2050            else:
2051                for r in rows:
2052                    fitparams = workscan._poly_baseline(mask, order, r)
2053                    params = fitparams.getparameters()
2054                    fmtd = ", ".join(["p%d = %3.6f" % (i, v) for i, v in enumerate(params)])
2055                    errors = fitparams.geterrors()
2056                    fmask = mask_and(mask, workscan._getmask(r))
2057
2058                    workscan.blpars.append({"params":params,
2059                                            "fixed": fitparams.getfixedparameters(),
2060                                            "formatted":fmtd, "errors":errors})
2061                    workscan.masklists.append(workscan.get_masklist(fmask, r, silent=True))
2062                    workscan.actualmask.append(fmask)
2063                   
2064                    asaplog.push(fmtd)
2065           
2066            workscan._add_history("poly_baseline", varlist)
2067           
2068            if insitu:
2069                self._assign(workscan)
2070            else:
2071                return workscan
2072           
2073        except RuntimeError, e:
2074            msg = "The fit failed, possibly because it didn't converge."
2075            if rcParams["verbose"]:
2076                asaplog.push(str(e))
2077                asaplog.push(str(msg))
2078                return
2079            else:
2080                raise RuntimeError(str(e)+'\n'+msg)
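    # Baseline-fitting sketch for poly_baseline(): a boolean channel mask is
    # built by hand here (any mask of length nchan() would do); the channel
    # range and order are illustrative.
    #
    #   msk = [True] * scan.nchan()
    #   for i in xrange(100):          # e.g. exclude the first 100 channels
    #       msk[i] = False
    #   bl = scan.poly_baseline(mask=msk, order=2, batch=True, insitu=False)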
2081
2082
2083    def auto_poly_baseline(self, mask=None, edge=(0, 0), order=0,
2084                           threshold=3, chan_avg_limit=1, plot=False,
2085                           insitu=None, rows=None):
2086        """\
2087        Return a scan which has been baselined (all rows) by a polynomial.
2088        Spectral lines are detected first using linefinder and masked out
2089        to avoid them affecting the baseline solution.
2090
2091        Parameters:
2092
2093            mask:       an optional mask retrieved from scantable
2094
2095            edge:       an optional number of channels to drop at the edge of
2096                        the spectrum. If only one value is
2097                        specified, the same number will be dropped from
2098                        both sides of the spectrum. Default is to keep
2099                        all channels. Nested tuples represent individual
2100                        edge selection for different IFs (the number of
2101                        spectral channels can differ between IFs)
2102
2103            order:      the order of the polynomial (default is 0)
2104
2105            threshold:  the threshold used by the line finder. It is better to
2106                        keep it large as only strong lines affect the
2107                        baseline solution.
2108
2109            chan_avg_limit:
2110                        a maximum number of consecutive spectral channels to
2111                        average during the search for weak and broad lines.
2112                        The default is no averaging (and no search for weak
2113                        lines). If such lines can affect the fitted baseline
2114                        (e.g. a high order polynomial is fitted), increase this
2115                        parameter (usually values up to 8 are reasonable). Most
2116                        users of this method should find the default value
2117                        sufficient.
2118
2119            plot:       plot the fit and the residual. In this mode each
2120                        individual fit has to be approved, by typing 'y'
2121                        or 'n'
2122
2123            insitu:     if False a new scantable is returned.
2124                        Otherwise, the scaling is done in-situ
2125                        The default is taken from .asaprc (False)
2126            rows:       row numbers of spectra to be processed.
2127                        (default is None: for all rows)
2128
2129
2130        Example::
2131
2132            scan2 = scan.auto_poly_baseline(order=7, insitu=False)
2133
2134        """
2135        if insitu is None: insitu = rcParams['insitu']
2136        varlist = vars()
2137        from asap.asaplinefind import linefinder
2138        from asap import _is_sequence_or_number as _is_valid
2139
2140        # check whether edge is set up for each IF individually
2141        individualedge = False
2142        if len(edge) > 1:
2143            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
2144                individualedge = True
2145
2146        if not _is_valid(edge, int) and not individualedge:
2147            raise ValueError, "Parameter 'edge' has to be an integer or a " \
2148                              "pair of integers specified as a tuple. Nested tuples " \
2149                              "are allowed to make individual selection for different IFs."
2150
2151        curedge = (0, 0)
2152        if individualedge:
2153            for edgepar in edge:
2154                if not _is_valid(edgepar, int):
2155                    raise ValueError, "Each element of the 'edge' tuple has " \
2156                                      "to be a pair of integers or an integer."
2157        else:
2158            curedge = edge
2159
2160        if not insitu:
2161            workscan = self.copy()
2162        else:
2163            workscan = self
2164
2165        # setup fitter
2166        f = fitter()
2167        f.set_function(lpoly=order)
2168
2169        # setup line finder
2170        fl = linefinder()
2171        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2172
2173        fl.set_scan(workscan)
2174
2175        if mask is None:
2176            mask = _n_bools(workscan.nchan(), True)
2177       
2178        if rows is None:
2179            rows = xrange(workscan.nrow())
2180        elif isinstance(rows, int):
2181            rows = [ rows ]
2182       
2183        # Save parameters of baseline fits & masklists as a class attribute.
2184        # NOTICE: It does not reflect changes in scantable!
2185        if len(rows) > 0:
2186            self.blpars=[]
2187            self.masklists=[]
2188            self.actualmask=[]
2189        asaplog.push("Processing:")
2190        for r in rows:
2191            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
2192                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
2193                 workscan.getpol(r), workscan.getcycle(r))
2194            asaplog.push(msg, False)
2195
2196            # figure out edge parameter
2197            if individualedge:
2198                if len(edge) <= workscan.getif(r):
2199                    raise RuntimeError, "Number of edge elements appears to " \
2200                                        "be less than the number of IFs"
2201                curedge = edge[workscan.getif(r)]
2202
2203            actualmask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2204
2205            # setup line finder
2206            fl.find_lines(r, actualmask, curedge)
2207           
2208            f.x = workscan._getabcissa(r)
2209            f.y = workscan._getspectrum(r)
2210            f.mask = fl.get_mask()
2211            f.data = None
2212            f.fit()
2213
2214            # Show mask list
2215            masklist=workscan.get_masklist(f.mask, row=r, silent=True)
2216            msg = "mask range: "+str(masklist)
2217            asaplog.push(msg, False)
2218
2219            if plot:
2220                f.plot(residual=True)
2221                x = raw_input("Accept fit ( [y]/n ): ")
2222                if x.upper() == 'N':
2223                    self.blpars.append(None)
2224                    self.masklists.append(None)
2225                    self.actualmask.append(None)
2226                    continue
2227
2228            workscan._setspectrum(f.fitter.getresidual(), r)
2229            self.blpars.append(f.get_parameters())
2230            self.masklists.append(masklist)
2231            self.actualmask.append(f.mask)
2232        if plot:
2233            f._p.unmap()
2234            f._p = None
2235        workscan._add_history("auto_poly_baseline", varlist)
2236        if insitu:
2237            self._assign(workscan)
2238        else:
2239            return workscan
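    # Edge-parameter sketch for auto_poly_baseline(): a single pair applies to
    # all IFs, nested tuples give per-IF edges (the values are arbitrary).
    #
    #   b1 = scan.auto_poly_baseline(order=3, edge=(30, 30), insitu=False)
    #   b2 = scan.auto_poly_baseline(order=3, edge=((30, 30), (100, 100)),
    #                                insitu=False)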
2240
2241    @asaplog_post_dec
2242    def rotate_linpolphase(self, angle):
2243        """\
2244        Rotate the phase of the complex polarization O=Q+iU correlation.
2245        This is always done in situ in the raw data.  So if you call this
2246        function more than once then each call rotates the phase further.
2247
2248        Parameters:
2249
2250            angle:   The angle (degrees) to rotate (add) by.
2251
2252        Example::
2253
2254            scan.rotate_linpolphase(2.3)
2255
2256        """
2257        varlist = vars()
2258        self._math._rotate_linpolphase(self, angle)
2259        self._add_history("rotate_linpolphase", varlist)
2260        return
2261
2262    @asaplog_post_dec
2263    def rotate_xyphase(self, angle):
2264        """\
2265        Rotate the phase of the XY correlation.  This is always done in situ
2266        in the data.  So if you call this function more than once
2267        then each call rotates the phase further.
2268
2269        Parameters:
2270
2271            angle:   The angle (degrees) to rotate (add) by.
2272
2273        Example::
2274
2275            scan.rotate_xyphase(2.3)
2276
2277        """
2278        varlist = vars()
2279        self._math._rotate_xyphase(self, angle)
2280        self._add_history("rotate_xyphase", varlist)
2281        return
2282
2283    @asaplog_post_dec
2284    def swap_linears(self):
2285        """\
2286        Swap the linear polarisations XX and YY, or more generally the first
2287        two polarisations, as this also works for circulars.
2288        """
2289        varlist = vars()
2290        self._math._swap_linears(self)
2291        self._add_history("swap_linears", varlist)
2292        return
2293
2294    @asaplog_post_dec
2295    def invert_phase(self):
2296        """\
2297        Invert the phase of the complex polarisation
2298        """
2299        varlist = vars()
2300        self._math._invert_phase(self)
2301        self._add_history("invert_phase", varlist)
2302        return
2303
2304    @asaplog_post_dec
2305    def add(self, offset, insitu=None):
2306        """\
2307        Return a scan where all spectra have the offset added
2308
2309        Parameters:
2310
2311            offset:      the offset
2312
2313            insitu:      if False a new scantable is returned.
2314                         Otherwise, the scaling is done in-situ
2315                         The default is taken from .asaprc (False)
2316
2317        """
2318        if insitu is None: insitu = rcParams['insitu']
2319        self._math._setinsitu(insitu)
2320        varlist = vars()
2321        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2322        s._add_history("add", varlist)
2323        if insitu:
2324            self._assign(s)
2325        else:
2326            return s
2327
2328    @asaplog_post_dec
2329    def scale(self, factor, tsys=True, insitu=None):
2330        """\
2331
2332        Return a scan where all spectra are scaled by the given 'factor'
2333
2334        Parameters:
2335
2336            factor:      the scaling factor (float or 1D float list)
2337
2338            insitu:      if False a new scantable is returned.
2339                         Otherwise, the scaling is done in-situ
2340                         The default is taken from .asaprc (False)
2341
2342            tsys:        if True (default) then apply the operation to Tsys
2343                         as well as the data
2344
2345        """
2346        if insitu is None: insitu = rcParams['insitu']
2347        self._math._setinsitu(insitu)
2348        varlist = vars()
2349        s = None
2350        import numpy
2351        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
2352            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
2353                from asapmath import _array2dOp
2354                s = _array2dOp( self.copy(), factor, "MUL", tsys )
2355            else:
2356                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
2357        else:
2358            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
2359        s._add_history("scale", varlist)
2360        if insitu:
2361            self._assign(s)
2362        else:
2363            return s
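    # scale() sketch: a scalar multiplies every channel, while a 1D list (here
    # assumed to hold one factor per channel) is applied element-wise; the
    # numbers are illustrative.
    #
    #   s1 = scan.scale(1.25, tsys=True, insitu=False)
    #   s2 = scan.scale([1.0] * scan.nchan(), tsys=False, insitu=False)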
2364
2365    def set_sourcetype(self, match, matchtype="pattern",
2366                       sourcetype="reference"):
2367        """\
2368        Set the type of the source to be a source or reference scan
2369        using the provided pattern.
2370
2371        Parameters:
2372
2373            match:          a Unix style pattern, regular expression or selector
2374
2375            matchtype:      'pattern' (default) UNIX style pattern or
2376                            'regex' regular expression
2377
2378            sourcetype:     the type of the source to use (source/reference)
2379
2380        """
2381        varlist = vars()
2382        basesel = self.get_selection()
2383        stype = -1
2384        if sourcetype.lower().startswith("r"):
2385            stype = 1
2386        elif sourcetype.lower().startswith("s"):
2387            stype = 0
2388        else:
2389            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
2390        if matchtype.lower().startswith("p"):
2391            matchtype = "pattern"
2392        elif matchtype.lower().startswith("r"):
2393            matchtype = "regex"
2394        else:
2395            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
2396        sel = selector()
2397        if isinstance(match, selector):
2398            sel = match
2399        else:
2400            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
2401        self.set_selection(basesel+sel)
2402        self._setsourcetype(stype)
2403        self.set_selection(basesel)
2404        self._add_history("set_sourcetype", varlist)
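    # set_sourcetype() sketch: mark scans whose source name ends in '_R' as
    # references; both patterns below are examples only.
    #
    #   scan.set_sourcetype('*_R', matchtype='pattern', sourcetype='reference')
    #   scan.set_sourcetype('.+_[ew]$', matchtype='regex', sourcetype='reference')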
2405
2406    @asaplog_post_dec
2407    @preserve_selection
2408    def auto_quotient(self, preserve=True, mode='paired', verify=False):
2409        """\
2410        This function allows you to build quotients automatically.
2411        It assumes the observation has the same number of
2412        "ons" and "offs"
2413
2414        Parameters:
2415
2416            preserve:       you can preserve (default) the continuum or
2417                            remove it.  The equations used are
2418
2419                            preserve: Output = Toff * (on/off) - Toff
2420
2421                            remove:   Output = Toff * (on/off) - Ton
2422
2423            mode:           the on/off detection mode
2424                            'paired' (default)
2425                            identifies 'off' scans by the
2426                            trailing '_R' (Mopra/Parkes) or
2427                            '_e'/'_w' (Tid) and matches
2428                            on/off pairs from the observing pattern
2429                            'time'
2430                            finds the closest off in time
2431
2432        .. todo:: verify argument is not implemented
2433
2434        """
2435        varlist = vars()
2436        modes = ["time", "paired"]
2437        if not mode in modes:
2438            msg = "please provide valid mode. Valid modes are %s" % (modes)
2439            raise ValueError(msg)
2440        s = None
2441        if mode.lower() == "paired":
2442            sel = self.get_selection()
2443            sel.set_query("SRCTYPE==psoff")
2444            self.set_selection(sel)
2445            offs = self.copy()
2446            sel.set_query("SRCTYPE==pson")
2447            self.set_selection(sel)
2448            ons = self.copy()
2449            s = scantable(self._math._quotient(ons, offs, preserve))
2450        elif mode.lower() == "time":
2451            s = scantable(self._math._auto_quotient(self, mode, preserve))
2452        s._add_history("auto_quotient", varlist)
2453        return s
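    # auto_quotient() sketch following the two detection modes described above;
    # 'scan' is assumed to contain matching on/off integrations.
    #
    #   q1 = scan.auto_quotient()                              # paired on/off scans
    #   q2 = scan.auto_quotient(preserve=False, mode='time')   # nearest off in time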
2454
2455    @asaplog_post_dec
2456    def mx_quotient(self, mask = None, weight='median', preserve=True):
2457        """\
2458        Form a quotient using "off" beams when observing in "MX" mode.
2459
2460        Parameters:
2461
2462            mask:           an optional mask to be used when weight == 'stddev'
2463
2464            weight:         How to average the off beams.  Default is 'median'.
2465
2466            preserve:       you can preserve (default) the continuum or
2467                            remove it.  The equations used are:
2468
2469                                preserve: Output = Toff * (on/off) - Toff
2470
2471                                remove:   Output = Toff * (on/off) - Ton
2472
2473        """
2474        mask = mask or ()
2475        varlist = vars()
2476        on = scantable(self._math._mx_extract(self, 'on'))
2477        preoff = scantable(self._math._mx_extract(self, 'off'))
2478        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
2479        from asapmath  import quotient
2480        q = quotient(on, off, preserve)
2481        q._add_history("mx_quotient", varlist)
2482        return q
2483
2484    @asaplog_post_dec
2485    def freq_switch(self, insitu=None):
2486        """\
2487        Apply frequency switching to the data.
2488
2489        Parameters:
2490
2491            insitu:      if False a new scantable is returned.
2492                         Otherwise, the switching is done in-situ
2493                         The default is taken from .asaprc (False)
2494
2495        """
2496        if insitu is None: insitu = rcParams['insitu']
2497        self._math._setinsitu(insitu)
2498        varlist = vars()
2499        s = scantable(self._math._freqswitch(self))
2500        s._add_history("freq_switch", varlist)
2501        if insitu:
2502            self._assign(s)
2503        else:
2504            return s
2505
2506    @asaplog_post_dec
2507    def recalc_azel(self):
2508        """Recalculate the azimuth and elevation for each position."""
2509        varlist = vars()
2510        self._recalcazel()
2511        self._add_history("recalc_azel", varlist)
2512        return
2513
2514    @asaplog_post_dec
2515    def __add__(self, other):
2516        varlist = vars()
2517        s = None
2518        if isinstance(other, scantable):
2519            s = scantable(self._math._binaryop(self, other, "ADD"))
2520        elif isinstance(other, float):
2521            s = scantable(self._math._unaryop(self, other, "ADD", False))
2522        else:
2523            raise TypeError("Other input is not a scantable or float value")
2524        s._add_history("operator +", varlist)
2525        return s
2526
2527    @asaplog_post_dec
2528    def __sub__(self, other):
2529        """
2530        implicit on all axes and on Tsys
2531        """
2532        varlist = vars()
2533        s = None
2534        if isinstance(other, scantable):
2535            s = scantable(self._math._binaryop(self, other, "SUB"))
2536        elif isinstance(other, float):
2537            s = scantable(self._math._unaryop(self, other, "SUB", False))
2538        else:
2539            raise TypeError("Other input is not a scantable or float value")
2540        s._add_history("operator -", varlist)
2541        return s
2542
2543    @asaplog_post_dec
2544    def __mul__(self, other):
2545        """
2546        implicit on all axes and on Tsys
2547        """
2548        varlist = vars()
2549        s = None
2550        if isinstance(other, scantable):
2551            s = scantable(self._math._binaryop(self, other, "MUL"))
2552        elif isinstance(other, float):
2553            s = scantable(self._math._unaryop(self, other, "MUL", False))
2554        else:
2555            raise TypeError("Other input is not a scantable or float value")
2556        s._add_history("operator *", varlist)
2557        return s
2558
2559
2560    @asaplog_post_dec
2561    def __div__(self, other):
2562        """
2563        implicit on all axes and on Tsys
2564        """
2565        varlist = vars()
2566        s = None
2567        if isinstance(other, scantable):
2568            s = scantable(self._math._binaryop(self, other, "DIV"))
2569        elif isinstance(other, float):
2570            if other == 0.0:
2571                raise ZeroDivisionError("Dividing by zero is not recommended")
2572            s = scantable(self._math._unaryop(self, other, "DIV", False))
2573        else:
2574            raise TypeError("Other input is not a scantable or float value")
2575        s._add_history("operator /", varlist)
2576        return s
2577
2578    @asaplog_post_dec
2579    def get_fit(self, row=0):
2580        """\
2581        Print or return the stored fits for a row in the scantable
2582
2583        Parameters:
2584
2585            row:    the row which the fit has been applied to.
2586
2587        """
2588        if row >= self.nrow():
2589            return
2590        from asap.asapfit import asapfit
2591        fit = asapfit(self._getfit(row))
2592        asaplog.push( '%s' %(fit) )
2593        return fit.as_dict()
2594
2595    def flag_nans(self):
2596        """\
2597        Utility function to flag NaN values in the scantable.
2598        """
2599        import numpy
2600        basesel = self.get_selection()
2601        for i in range(self.nrow()):
2602            sel = self.get_row_selector(i)
2603            self.set_selection(basesel+sel)
2604            nans = numpy.isnan(self._getspectrum(0))
2605            if numpy.any(nans):
2606                bnans = [ bool(v) for v in nans]
2607                self.flag(bnans)
2608        self.set_selection(basesel)
2609
2610    def get_row_selector(self, rowno):
2611        return selector(beams=self.getbeam(rowno),
2612                        ifs=self.getif(rowno),
2613                        pols=self.getpol(rowno),
2614                        scans=self.getscan(rowno),
2615                        cycles=self.getcycle(rowno))
2616
2617    def _add_history(self, funcname, parameters):
2618        if not rcParams['scantable.history']:
2619            return
2620        # create date
2621        sep = "##"
2622        from datetime import datetime
2623        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
2624        hist = dstr+sep
2625        hist += funcname+sep#cdate+sep
2626        if parameters.has_key('self'): del parameters['self']
2627        for k, v in parameters.iteritems():
2628            if type(v) is dict:
2629                for k2, v2 in v.iteritems():
2630                    hist += k2
2631                    hist += "="
2632                    if isinstance(v2, scantable):
2633                        hist += 'scantable'
2634                    elif k2 == 'mask':
2635                        if isinstance(v2, list) or isinstance(v2, tuple):
2636                            hist += str(self._zip_mask(v2))
2637                        else:
2638                            hist += str(v2)
2639                    else:
2640                        hist += str(v2)
2641            else:
2642                hist += k
2643                hist += "="
2644                if isinstance(v, scantable):
2645                    hist += 'scantable'
2646                elif k == 'mask':
2647                    if isinstance(v, list) or isinstance(v, tuple):
2648                        hist += str(self._zip_mask(v))
2649                    else:
2650                        hist += str(v)
2651                else:
2652                    hist += str(v)
2653            hist += sep
2654        hist = hist[:-2] # remove trailing '##'
2655        self._addhistory(hist)
2656
2657
2658    def _zip_mask(self, mask):
2659        mask = list(mask)
2660        i = 0
2661        segments = []
2662        while mask[i:].count(1):
2663            i += mask[i:].index(1)
2664            if mask[i:].count(0):
2665                j = i + mask[i:].index(0)
2666            else:
2667                j = len(mask)
2668            segments.append([i, j])
2669            i = j
2670        return segments
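    # _zip_mask() compresses a boolean channel mask into [start, stop) segments
    # of consecutive True values, e.g. (worked by hand):
    #
    #   [False, True, True, False, True]  ->  [[1, 3], [4, 5]]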
2671
2672    def _get_ordinate_label(self):
2673        fu = "("+self.get_fluxunit()+")"
2674        import re
2675        lbl = "Intensity"
2676        if re.match(".K.", fu):
2677            lbl = "Brightness Temperature "+ fu
2678        elif re.match(".Jy.", fu):
2679            lbl = "Flux density "+ fu
2680        return lbl
2681
2682    def _check_ifs(self):
2683        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2684        nchans = filter(lambda t: t > 0, nchans)
2685        return (sum(nchans)/len(nchans) == nchans[0])
2686
2687    @asaplog_post_dec
2688    #def _fill(self, names, unit, average, getpt, antenna):
2689    def _fill(self, names, unit, average, opts={}):
2690        first = True
2691        fullnames = []
2692        for name in names:
2693            name = os.path.expandvars(name)
2694            name = os.path.expanduser(name)
2695            if not os.path.exists(name):
2696                msg = "File '%s' does not exist" % (name)
2697                raise IOError(msg)
2698            fullnames.append(name)
2699        if average:
2700            asaplog.push('Auto averaging integrations')
2701        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2702        for name in fullnames:
2703            tbl = Scantable(stype)
2704            r = filler(tbl)
2705            rx = rcParams['scantable.reference']
2706            r.setreferenceexpr(rx)
2707            msg = "Importing %s..." % (name)
2708            asaplog.push(msg, False)
2709            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
2710            r.open(name, opts)# antenna, -1, -1, getpt)
2711            r.fill()
2712            if average:
2713                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2714            if not first:
2715                tbl = self._math._merge([self, tbl])
2716            Scantable.__init__(self, tbl)
2717            r.close()
2718            del r, tbl
2719            first = False
2720            #flush log
2721        asaplog.post()
2722        if unit is not None:
2723            self.set_fluxunit(unit)
2724        if not is_casapy():
2725            self.set_freqframe(rcParams['scantable.freqframe'])
2726
2727    def __getitem__(self, key):
2728        if key < 0:
2729            key += self.nrow()
2730        if key >= self.nrow():
2731            raise IndexError("Row index out of range.")
2732        return self._getspectrum(key)
2733
2734    def __setitem__(self, key, value):
2735        if key < 0:
2736            key += self.nrow()
2737        if key >= self.nrow():
2738            raise IndexError("Row index out of range.")
2739        if not hasattr(value, "__len__") or \
2740                len(value) > self.nchan(self.getif(key)):
2741            raise ValueError("Spectrum length doesn't match.")
2742        return self._setspectrum(value, key)
2743
2744    def __len__(self):
2745        return self.nrow()
2746
2747    def __iter__(self):
2748        for i in range(len(self)):
2749            yield self[i]