source: trunk/python/scantable.py @ 2186

Last change on this file since 2186 was 2186, checked in by WataruKawasaki, 13 years ago

New Development: Yes

JIRA Issue: Yes CAS-3149

Ready for Test: Yes

Interface Changes: Yes

What Interface Changed: scantable.*sinusoid_baseline() params

Test Programs:

Put in Release Notes: Yes

Module(s):

Description: (1) Implemented an automated sinusoidal fitting functionality.

(2) Made FFT available through scantable.fft() (a brief usage sketch follows below).
(3) Fixed a bug in parsing the 'edge' parameter used by linefinder.
(4) Added a function to show progress status for row-based iterations.
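
As a rough illustration of item (2), a call to the new FFT entry point might look
like the sketch below. The file name and row number are placeholders (not taken
from this changeset), and the return structure follows the scantable.fft()
docstring further down in this file:

    from asap import scantable
    s = scantable('mydata.asap', average=False)
    res = s.fft(rowno=0)                           # one dict per processed row
    ampl, phase = res[0]['ampl'], res[0]['phase']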


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 130.5 KB
1"""This module defines the scantable class."""
2
3import os
4import numpy
5try:
6    from functools import wraps as wraps_dec
7except ImportError:
8    from asap.compatibility import wraps as wraps_dec
9
10from asap.env import is_casapy
11from asap._asap import Scantable
12from asap._asap import filler, msfiller
13from asap.parameters import rcParams
14from asap.logging import asaplog, asaplog_post_dec
15from asap.selector import selector
16from asap.linecatalog import linecatalog
17from asap.coordinate import coordinate
18from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
19from asap.asapfitter import fitter
20
21
22def preserve_selection(func):
23    @wraps_dec(func)
24    def wrap(obj, *args, **kw):
25        basesel = obj.get_selection()
26        try:
27            val = func(obj, *args, **kw)
28        finally:
29            obj.set_selection(basesel)
30        return val
31    return wrap
32
33def is_scantable(filename):
34    """Is the given file a scantable?
35
36    Parameters:
37
38        filename: the name of the file/directory to test
39
40    """
41    if ( os.path.isdir(filename)
42         and os.path.exists(filename+'/table.info')
43         and os.path.exists(filename+'/table.dat') ):
44        f=open(filename+'/table.info')
45        l=f.readline()
46        f.close()
47        #if ( l.find('Scantable') != -1 ):
48        if ( l.find('Measurement Set') == -1 ):
49            return True
50        else:
51            return False
52    else:
53        return False
54##     return (os.path.isdir(filename)
55##             and not os.path.exists(filename+'/table.f1')
56##             and os.path.exists(filename+'/table.info'))
57
58def is_ms(filename):
59    """Is the given file a MeasurementSet?
60
61    Parameters:
62
63        filename: the name of the file/directory to test
64
65    """
66    if ( os.path.isdir(filename)
67         and os.path.exists(filename+'/table.info')
68         and os.path.exists(filename+'/table.dat') ):
69        f=open(filename+'/table.info')
70        l=f.readline()
71        f.close()
72        if ( l.find('Measurement Set') != -1 ):
73            return True
74        else:
75            return False
76    else:
77        return False
78
79def normalise_edge_param(edge):
80    """\
81    Convert a given edge value to a one-dimensional array that can be
82    given to baseline-fitting/subtraction functions.
83    The length of the output will always be even, because values for
84    both sides of the spectra are contained for each IF. When the
85    length is 2, the values are applied to all IFs. If the length is
86    larger than 2, it will be 2*ifnos().
87    Accepted formats of edge include:
88            * an integer - will be used for both sides of spectra of all IFs.
89                  e.g. 10 is converted to [10,10]
90            * an empty list/tuple [] - converted to [0, 0] and used for all IFs.
91            * a list/tuple containing an integer - same as the above case.
92                  e.g. [10] is converted to [10,10]
93            * a list/tuple containing two integers - will be used for all IFs.
94                  e.g. [5,10] is output as it is. no need to convert.
95            * a list/tuple of lists/tuples containing TWO integers -
96                  each element of edge will be used for each IF.
97                  e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
98                 
99                  If an element contains the same integer values, the input 'edge'
100                  parameter can be given in a simpler shape in the following cases:
101                      ** when len(edge)!=2
102                          any elements containing the same values can be replaced
103                          with single integers.
104                          e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15 also).
105                          e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
106                      ** when len(edge)=2
107                          care is needed for this case: ONLY ONE of the
108                          elements can be a single integer,
109                          e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
110                               or [[5,5],10], but can NOT be simplified to [5,10].
111                               when [5,10] given, it is interpreted as
112                               [[5,10],[5,10],[5,10],...] instead, as shown before.
113    """
114    from asap import _is_sequence_or_number as _is_valid
115    if isinstance(edge, list) or isinstance(edge, tuple):
116        for edgepar in edge:
117            if not _is_valid(edgepar, int):
118                raise ValueError, "Each element of the 'edge' tuple has \
119                                   to be a pair of integers or an integer."
120            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
121                if len(edgepar) != 2:
122                    raise ValueError, "Each element of the 'edge' tuple has \
123                                       to be a pair of integers or an integer."
124    else:
125        if not _is_valid(edge, int):
126            raise ValueError, "Parameter 'edge' has to be an integer or a \
127                               pair of integers specified as a tuple. \
128                               Nested tuples are allowed \
129                               to make individual selection for different IFs."
130       
131
132    if isinstance(edge, int):
133        edge = [ edge, edge ]                 # e.g. 3   => [3,3]
134    elif isinstance(edge, list) or isinstance(edge, tuple):
135        if len(edge) == 0:
136            edge = [0, 0]                     # e.g. []  => [0,0]
137        elif len(edge) == 1:
138            if isinstance(edge[0], int):
139                edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]
140
141    commonedge = True
142    if len(edge) > 2: commonedge = False
143    else:
144        for edgepar in edge:
145            if isinstance(edgepar, list) or isinstance(edgepar, tuple):
146                commonedge = False
147                break
148
149    if commonedge:
150        if len(edge) > 1:
151            norm_edge = edge
152        else:
153            norm_edge = edge + edge
154    else:
155        norm_edge = []
156        for edgepar in edge:
157            if isinstance(edgepar, int):
158                norm_edge += [edgepar, edgepar]
159            else:
160                norm_edge += edgepar
161
162    return norm_edge
163
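# Illustrative mappings (editor's sketch, not part of the original source): the
# conversions below follow the docstring and code of normalise_edge_param() above.
#     normalise_edge_param(10)                    # -> [10, 10]
#     normalise_edge_param([])                    # -> [0, 0]
#     normalise_edge_param([5, 10])               # -> [5, 10]
#     normalise_edge_param([[5, 10], [15, 20]])   # -> [5, 10, 15, 20]
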
164def raise_fitting_failure_exception(e):
165    msg = "The fit failed, possibly because it didn't converge."
166    if rcParams["verbose"]:
167        asaplog.push(str(e))
168        asaplog.push(str(msg))
169    else:
170        raise RuntimeError(str(e)+'\n'+msg)
171   
172
173class scantable(Scantable):
174    """\
175        The ASAP container for scans (single-dish data).
176    """
177
178    @asaplog_post_dec
179    #def __init__(self, filename, average=None, unit=None, getpt=None,
180    #             antenna=None, parallactify=None):
181    def __init__(self, filename, average=None, unit=None, parallactify=None, **args):
182        """\
183        Create a scantable from a saved one or make a reference
184
185        Parameters:
186
187            filename:     the name of an asap table on disk
188                          or
189                          the name of a rpfits/sdfits/ms file
190                          (integrations within scans are auto averaged
191                          and the whole file is read) or
192                          [advanced] a reference to an existing scantable
193
194            average:      average all integrations within a scan on read.
195                          The default (True) is taken from .asaprc.
196
197            unit:         brightness unit; must be consistent with K or Jy.
198                          Over-rides the default selected by the filler
199                          (input rpfits/sdfits/ms) or replaces the value
200                          in existing scantables
201
202            getpt:        for MeasurementSet input data only:
203                          If True, all pointing data are filled.
204                          The default is False, which can make loading
205                          the MS data faster in some cases.
206
207            antenna:      for MeasurementSet input data only:
208                          Antenna selection. integer (id) or string (name or id).
209
210            parallactify: Indicate that the data has been parallactified. Default
211                          is taken from rc file.
212
213        """
214        if average is None:
215            average = rcParams['scantable.autoaverage']
216        #if getpt is None:
217        #    getpt = True
218        #if antenna is not None:
219        #    asaplog.push("Antenna selection currently unsupported."
220        #                 "Using ''")
221        #    asaplog.post('WARN')
222        #if antenna is None:
223        #    antenna = ''
224        #elif type(antenna) == int:
225        #    antenna = '%s' % antenna
226        #elif type(antenna) == list:
227        #    tmpstr = ''
228        #    for i in range( len(antenna) ):
229        #        if type(antenna[i]) == int:
230        #            tmpstr = tmpstr + ('%s,'%(antenna[i]))
231        #        elif type(antenna[i]) == str:
232        #            tmpstr=tmpstr+antenna[i]+','
233        #        else:
234        #            raise TypeError('Bad antenna selection.')
235        #    antenna = tmpstr.rstrip(',')
236        parallactify = parallactify or rcParams['scantable.parallactify']
237        varlist = vars()
238        from asap._asap import stmath
239        self._math = stmath( rcParams['insitu'] )
240        if isinstance(filename, Scantable):
241            Scantable.__init__(self, filename)
242        else:
243            if isinstance(filename, str):
244                filename = os.path.expandvars(filename)
245                filename = os.path.expanduser(filename)
246                if not os.path.exists(filename):
247                    s = "File '%s' not found." % (filename)
248                    raise IOError(s)
249                if is_scantable(filename):
250                    ondisk = rcParams['scantable.storage'] == 'disk'
251                    Scantable.__init__(self, filename, ondisk)
252                    if unit is not None:
253                        self.set_fluxunit(unit)
254                    if average:
255                        self._assign( self.average_time( scanav=True ) )
256                    # do not reset to the default freqframe
257                    #self.set_freqframe(rcParams['scantable.freqframe'])
258                #elif os.path.isdir(filename) \
259                #         and not os.path.exists(filename+'/table.f1'):
260                elif is_ms(filename):
261                    # Measurement Set
262                    opts={'ms': {}}
263                    mskeys=['getpt','antenna']
264                    for key in mskeys:
265                        if key in args.keys():
266                            opts['ms'][key] = args[key]
267                    #self._fill([filename], unit, average, getpt, antenna)
268                    self._fill([filename], unit, average, opts)
269                elif os.path.isfile(filename):
270                    #self._fill([filename], unit, average, getpt, antenna)
271                    self._fill([filename], unit, average)
272                else:
273                    msg = "The given file '%s' is not a valid " \
274                          "asap table." % (filename)
275                    raise IOError(msg)
276            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
277                  and isinstance(filename[-1], str):
278                #self._fill(filename, unit, average, getpt, antenna)
279                self._fill(filename, unit, average)
280        self.parallactify(parallactify)
281        self._add_history("scantable", varlist)
282
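    # Illustrative construction (editor's sketch; file names are placeholders):
    # for MeasurementSet input the optional 'getpt' and 'antenna' keywords are
    # forwarded to the filler via **args, as described in the docstring above.
    #     scan   = scantable('obs.rpf', average=True, unit='K')
    #     msscan = scantable('obs.ms', average=False, getpt=False, antenna=0)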
283    @asaplog_post_dec
284    def save(self, name=None, format=None, overwrite=False):
285        """\
286        Store the scantable on disk. This can be an asap (aips++) Table,
287        SDFITS or MS2 format.
288
289        Parameters:
290
291            name:        the name of the outputfile. For format "ASCII"
292                         this is the root file name (data in 'name'.txt
293                         and header in 'name'_header.txt)
294
295            format:      an optional file format. Default is ASAP.
296                         Allowed are:
297
298                            * 'ASAP' (save as ASAP [aips++] Table),
299                            * 'SDFITS' (save as SDFITS file)
300                            * 'ASCII' (saves as ascii text file)
301                            * 'MS2' (saves as a casacore MeasurementSet V2)
302                            * 'FITS' (save as image FITS - not readable by class)
303                            * 'CLASS' (save as FITS readable by CLASS)
304
305            overwrite:   Whether to overwrite the output file if it exists.
306                         The default (False) is to return with a warning
307                         without writing the output. USE WITH CARE.
308
309        Example::
310
311            scan.save('myscan.asap')
312            scan.save('myscan.sdfits', 'SDFITS')
313
314        """
315        from os import path
316        format = format or rcParams['scantable.save']
317        suffix = '.'+format.lower()
318        if name is None or name == "":
319            name = 'scantable'+suffix
320            msg = "No filename given. Using default name %s..." % name
321            asaplog.push(msg)
322        name = path.expandvars(name)
323        if path.isfile(name) or path.isdir(name):
324            if not overwrite:
325                msg = "File %s exists." % name
326                raise IOError(msg)
327        format2 = format.upper()
328        if format2 == 'ASAP':
329            self._save(name)
330        elif format2 == 'MS2':
331            msopt = {'ms': {'overwrite': overwrite } }
332            from asap._asap import mswriter
333            writer = mswriter( self )
334            writer.write( name, msopt )
335        else:
336            from asap._asap import stwriter as stw
337            writer = stw(format2)
338            writer.write(self, name)
339        return
340
341    def copy(self):
342        """Return a copy of this scantable.
343
344        *Note*:
345
346            This makes a full (deep) copy. scan2 = scan1 makes a reference.
347
348        Example::
349
350            copiedscan = scan.copy()
351
352        """
353        sd = scantable(Scantable._copy(self))
354        return sd
355
356    def drop_scan(self, scanid=None):
357        """\
358        Return a new scantable where the specified scan number(s) has(have)
359        been dropped.
360
361        Parameters:
362
363            scanid:    a (list of) scan number(s)
364
365        """
366        from asap import _is_sequence_or_number as _is_valid
367        from asap import _to_list
368        from asap import unique
369        if not _is_valid(scanid):
370            raise RuntimeError( 'Please specify a scanno to drop from the scantable' )
371        scanid = _to_list(scanid)
372        allscans = unique([ self.getscan(i) for i in range(self.nrow())])
373        for sid in scanid: allscans.remove(sid)
374        if len(allscans) == 0:
375            raise ValueError("Can't remove all scans")
376        sel = selector(scans=allscans)
377        return self._select_copy(sel)
378
379    def _select_copy(self, selection):
380        orig = self.get_selection()
381        self.set_selection(orig+selection)
382        cp = self.copy()
383        self.set_selection(orig)
384        return cp
385
386    def get_scan(self, scanid=None):
387        """\
388        Return a specific scan (by scanno) or collection of scans (by
389        source name) in a new scantable.
390
391        *Note*:
392
393            See scantable.drop_scan() for the inverse operation.
394
395        Parameters:
396
397            scanid:    a (list of) scanno or a source name, unix-style
398                       patterns are accepted for source name matching, e.g.
399                       '*_R' gets all 'ref' scans
400
401        Example::
402
403            # get all scans containing the source '323p459'
404            newscan = scan.get_scan('323p459')
405            # get all 'off' scans
406            refscans = scan.get_scan('*_R')
407            # get a subset of scans by scanno (as listed in scan.summary())
408            newscan = scan.get_scan([0, 2, 7, 10])
409
410        """
411        if scanid is None:
412            raise RuntimeError( 'Please specify a scan no or name to '
413                                'retrieve from the scantable' )
414        try:
415            bsel = self.get_selection()
416            sel = selector()
417            if type(scanid) is str:
418                sel.set_name(scanid)
419                return self._select_copy(sel)
420            elif type(scanid) is int:
421                sel.set_scans([scanid])
422                return self._select_copy(sel)
423            elif type(scanid) is list:
424                sel.set_scans(scanid)
425                return self._select_copy(sel)
426            else:
427                msg = "Illegal scanid type, use 'int' or 'list' of ints."
428                raise TypeError(msg)
429        except RuntimeError:
430            raise
431
432    def __str__(self):
433        return Scantable._summary(self)
434
435    def summary(self, filename=None):
436        """\
437        Print a summary of the contents of this scantable.
438
439        Parameters:
440
441            filename:    the name of a file to write the output to
442                         Default - no file output
443
444        """
445        info = Scantable._summary(self)
446        if filename is not None:
447            if filename == "":
448                filename = 'scantable_summary.txt'
449            from os.path import expandvars, isdir
450            filename = expandvars(filename)
451            if not isdir(filename):
452                data = open(filename, 'w')
453                data.write(info)
454                data.close()
455            else:
456                msg = "Illegal file name '%s'." % (filename)
457                raise IOError(msg)
458        return page(info)
459
460    def get_spectrum(self, rowno):
461        """Return the spectrum for the current row in the scantable as a list.
462
463        Parameters:
464
465             rowno:   the row number to retrieve the spectrum from
466
467        """
468        return self._getspectrum(rowno)
469
470    def get_mask(self, rowno):
471        """Return the mask for the current row in the scantable as a list.
472
473        Parameters:
474
475             rowno:   the row number to retrieve the mask from
476
477        """
478        return self._getmask(rowno)
479
480    def set_spectrum(self, spec, rowno):
481        """Set the spectrum for the current row in the scantable.
482
483        Parameters:
484
485             spec:   the new spectrum
486
487             rowno:  the row number to set the spectrum for
488
489        """
490        assert(len(spec) == self.nchan())
491        return self._setspectrum(spec, rowno)
492
493    def get_coordinate(self, rowno):
494        """Return the (spectral) coordinate for a given 'rowno'.
495
496        *Note*:
497
498            * This coordinate is only valid until a scantable method modifies
499              the frequency axis.
500            * This coordinate contains the original frequency set-up,
501              NOT the new frame. The conversions, however, are done using the
502              user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
503              use scantable.freq_align first. Without it there is no closure,
504              i.e.::
505
506                  c = myscan.get_coordinate(0)
507                  c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
508
509        Parameters:
510
511             rowno:    the row number for the spectral coordinate
512
513        """
514        return coordinate(Scantable.get_coordinate(self, rowno))
515
516    def get_selection(self):
517        """\
518        Get the selection object currently set on this scantable.
519
520        Example::
521
522            sel = scan.get_selection()
523            sel.set_ifs(0)              # select IF 0
524            scan.set_selection(sel)     # apply modified selection
525
526        """
527        return selector(self._getselection())
528
529    def set_selection(self, selection=None, **kw):
530        """\
531        Select a subset of the data. All following operations on this scantable
532        are only applied to this selection.
533
534        Parameters:
535
536            selection:    a selector object (default unset the selection), or
537                          any combination of "pols", "ifs", "beams", "scans",
538                          "cycles", "name", "query"
539
540        Examples::
541
542            sel = selector()         # create a selection object
543            sel.set_scans([0, 3])     # select SCANNO 0 and 3
544            scan.set_selection(sel)  # set the selection
545            scan.summary()           # will only print summary of scanno 0 and 3
546            scan.set_selection()     # unset the selection
547            # or the equivalent
548            scan.set_selection(scans=[0,3])
549            scan.summary()           # will only print summary of scanno 0 and 3
550            scan.set_selection()     # unset the selection
551
552        """
553        if selection is None:
554            # reset
555            if len(kw) == 0:
556                selection = selector()
557            else:
558                # try keywords
559                for k in kw:
560                    if k not in selector.fields:
561                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
562                selection = selector(**kw)
563        self._setselection(selection)
564
565    def get_row(self, row=0, insitu=None):
566        """\
567        Select a row in the scantable.
568        Return a scantable with a single row.
569
570        Parameters:
571
572            row:    row no of integration, default is 0.
573            insitu: if False a new scantable is returned. Otherwise, the
574                    operation is done in-situ. The default is taken from .asaprc
575                    (False)
576
577        """
578        if insitu is None: insitu = rcParams['insitu']
579        if not insitu:
580            workscan = self.copy()
581        else:
582            workscan = self
583        # Select a row
584        sel=selector()
585        sel.set_rows([row])
586        #sel.set_scans([workscan.getscan(row)])
587        #sel.set_cycles([workscan.getcycle(row)])
588        #sel.set_beams([workscan.getbeam(row)])
589        #sel.set_ifs([workscan.getif(row)])
590        #sel.set_polarisations([workscan.getpol(row)])
591        #sel.set_name(workscan._getsourcename(row))
592        workscan.set_selection(sel)
593        if not workscan.nrow() == 1:
594            msg = "Could not identify single row. %d rows selected."%(workscan.nrow())
595            raise RuntimeError(msg)
596        del sel
597        if insitu:
598            self._assign(workscan)
599        else:
600            return workscan
601
602    @asaplog_post_dec
603    def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
604        """\
605        Determine the specified statistic of the current beam/if/pol.
606        Takes a 'mask' as an optional parameter to specify which
607        channels should be excluded.
608
609        Parameters:
610
611            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
612                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
613
614            mask:    an optional mask specifying where the statistic
615                     should be determined.
616
617            form:    format string to print statistic values
618
619            row:     row number of spectrum to process.
620                     (default is None: for all rows)
621
622        Example:
623            scan.set_unit('channel')
624            msk = scan.create_mask([100, 200], [500, 600])
625            scan.stats(stat='mean', mask=msk)
626
627        """
628        mask = mask or []
629        if not self._check_ifs():
630            raise ValueError("Cannot apply mask as the IFs have different "
631                             "number of channels. Please use set_selection() "
632                             "to select individual IFs")
633        rtnabc = False
634        if stat.lower().endswith('_abc'): rtnabc = True
635        getchan = False
636        if stat.lower().startswith('min') or stat.lower().startswith('max'):
637            chan = self._math._minmaxchan(self, mask, stat)
638            getchan = True
639            statvals = []
640        if not rtnabc:
641            if row == None:
642                statvals = self._math._stats(self, mask, stat)
643            else:
644                statvals = self._math._statsrow(self, mask, stat, int(row))
645
646        #def cb(i):
647        #    return statvals[i]
648
649        #return self._row_callback(cb, stat)
650
651        label=stat
652        #callback=cb
653        out = ""
654        #outvec = []
655        sep = '-'*50
656
657        if row == None:
658            rows = xrange(self.nrow())
659        elif isinstance(row, int):
660            rows = [ row ]
661
662        for i in rows:
663            refstr = ''
664            statunit= ''
665            if getchan:
666                qx, qy = self.chan2data(rowno=i, chan=chan[i])
667                if rtnabc:
668                    statvals.append(qx['value'])
669                    refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
670                    statunit= '['+qx['unit']+']'
671                else:
672                    refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
673
674            tm = self._gettime(i)
675            src = self._getsourcename(i)
676            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
677            out += 'Time[%s]:\n' % (tm)
678            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
679            if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
680            if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
681            #outvec.append(callback(i))
682            if len(rows) > 1:
683                # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
684                out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
685            else:
686                # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
687                out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
688            out +=  sep+"\n"
689
690        import os
691        if os.environ.has_key( 'USER' ):
692            usr = os.environ['USER']
693        else:
694            import commands
695            usr = commands.getoutput( 'whoami' )
696        tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
697        f = open(tmpfile,'w')
698        print >> f, sep
699        print >> f, ' %s %s' % (label, statunit)
700        print >> f, sep
701        print >> f, out
702        f.close()
703        f = open(tmpfile,'r')
704        x = f.readlines()
705        f.close()
706        asaplog.push(''.join(x), False)
707
708        return statvals
709
710    def chan2data(self, rowno=0, chan=0):
711        """\
712        Returns channel/frequency/velocity and spectral value
713        at an arbitrary row and channel in the scantable.
714
715        Parameters:
716
717            rowno:   a row number in the scantable. Default is the
718                     first row, i.e. rowno=0
719
720            chan:    a channel in the scantable. Default is the first
721                     channel, i.e. pos=0
722
723        """
724        if isinstance(rowno, int) and isinstance(chan, int):
725            qx = {'unit': self.get_unit(),
726                  'value': self._getabcissa(rowno)[chan]}
727            qy = {'unit': self.get_fluxunit(),
728                  'value': self._getspectrum(rowno)[chan]}
729            return qx, qy
730
731    def stddev(self, mask=None):
732        """\
733        Determine the standard deviation of the current beam/if/pol.
734        Takes a 'mask' as an optional parameter to specify which
735        channels should be excluded.
736
737        Parameters:
738
739            mask:    an optional mask specifying where the standard
740                     deviation should be determined.
741
742        Example::
743
744            scan.set_unit('channel')
745            msk = scan.create_mask([100, 200], [500, 600])
746            scan.stddev(mask=msk)
747
748        """
749        return self.stats(stat='stddev', mask=mask);
750
751
752    def get_column_names(self):
753        """\
754        Return a list of column names, which can be used for selection.
755        """
756        return list(Scantable.get_column_names(self))
757
758    def get_tsys(self, row=-1):
759        """\
760        Return the System temperatures.
761
762        Parameters:
763
764            row:    the rowno to get the information for. (default all rows)
765
766        Returns:
767
768            a list of Tsys values for the current selection
769
770        """
771        if row > -1:
772            return self._get_column(self._gettsys, row)
773        return self._row_callback(self._gettsys, "Tsys")
774
775
776    def get_weather(self, row=-1):
777        """\
778        Return the weather information.
779
780        Parameters:
781
782            row:    the rowno to get the information for. (default all rows)
783
784        Returns:
785
786            a dict or a list of dicts of values for the current selection
787
788        """
789
790        values = self._get_column(self._get_weather, row)
791        if row > -1:
792            return {'temperature': values[0],
793                    'pressure': values[1], 'humidity' : values[2],
794                    'windspeed' : values[3], 'windaz' : values[4]
795                    }
796        else:
797            out = []
798            for r in values:
799
800                out.append({'temperature': r[0],
801                            'pressure': r[1], 'humidity' : r[2],
802                            'windspeed' : r[3], 'windaz' : r[4]
803                    })
804            return out
805
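    # Illustrative access pattern (editor's sketch; the row number is a placeholder):
    # keys follow the dictionary built in get_weather() above.
    #     w = scan.get_weather(row=0)
    #     print w['temperature'], w['pressure'], w['humidity']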
806    def _row_callback(self, callback, label):
807        out = ""
808        outvec = []
809        sep = '-'*50
810        for i in range(self.nrow()):
811            tm = self._gettime(i)
812            src = self._getsourcename(i)
813            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
814            out += 'Time[%s]:\n' % (tm)
815            if self.nbeam(-1) > 1:
816                out +=  ' Beam[%d] ' % (self.getbeam(i))
817            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
818            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
819            outvec.append(callback(i))
820            out += '= %3.3f\n' % (outvec[i])
821            out +=  sep+'\n'
822
823        asaplog.push(sep)
824        asaplog.push(" %s" % (label))
825        asaplog.push(sep)
826        asaplog.push(out)
827        asaplog.post()
828        return outvec
829
830    def _get_column(self, callback, row=-1, *args):
831        """
832        """
833        if row == -1:
834            return [callback(i, *args) for i in range(self.nrow())]
835        else:
836            if  0 <= row < self.nrow():
837                return callback(row, *args)
838
839
840    def get_time(self, row=-1, asdatetime=False, prec=-1):
841        """\
842        Get a list of time stamps for the observations.
843        Return a datetime object or a string (default) for each
844        integration time stamp in the scantable.
845
846        Parameters:
847
848            row:          row no of integration. Default -1 return all rows
849
850            asdatetime:   return values as datetime objects rather than strings
851
852            prec:         number of digits shown. Default -1 means automatic calculation.
853                          Note this number equals the digits of MVTime,
854                          i.e., 0<prec<3: dates with hh:: only,
855                          <5: with hh:mm:, <7 or 0: with hh:mm:ss,
856                          and >6: with hh:mm:ss.tt... (prec-6 t's added)
857
858        """
859        from datetime import datetime
860        if prec < 0:
861            # automagically set necessary precision +1
862            prec = 7 - numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
863            prec = max(6, int(prec))
864        else:
865            prec = max(0, prec)
866        if asdatetime:
867            #precision can be 1 millisecond at max
868            prec = min(12, prec)
869
870        times = self._get_column(self._gettime, row, prec)
871        if not asdatetime:
872            return times
873        format = "%Y/%m/%d/%H:%M:%S.%f"
874        if prec < 7:
875            nsub = 1 + (((6-prec)/2) % 3)
876            substr = [".%f","%S","%M"]
877            for i in range(nsub):
878                format = format.replace(substr[i],"")
879        if isinstance(times, list):
880            return [datetime.strptime(i, format) for i in times]
881        else:
882            return datetime.strptime(times, format)
883
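    # Illustrative usage (editor's sketch; the row index is a placeholder): plain
    # strings by default, datetime objects with asdatetime=True; 'prec' controls
    # the number of digits as described in the docstring above.
    #     t0  = scan.get_time(row=0)                   # formatted time string
    #     dt0 = scan.get_time(row=0, asdatetime=True)  # datetime.datetime object
    #     all_times = scan.get_time()                  # one entry per row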
884
885    def get_inttime(self, row=-1):
886        """\
887        Get a list of integration times for the observations.
888        Return a time in seconds for each integration in the scantable.
889
890        Parameters:
891
892            row:    row no of integration. Default -1 return all rows.
893
894        """
895        return self._get_column(self._getinttime, row)
896
897
898    def get_sourcename(self, row=-1):
899        """\
900        Get a list of source names for the observations.
901        Return a string for each integration in the scantable.
902        Parameters:
903
904            row:    row no of integration. Default -1 return all rows.
905
906        """
907        return self._get_column(self._getsourcename, row)
908
909    def get_elevation(self, row=-1):
910        """\
911        Get a list of elevations for the observations.
912        Return a float for each integration in the scantable.
913
914        Parameters:
915
916            row:    row no of integration. Default -1 return all rows.
917
918        """
919        return self._get_column(self._getelevation, row)
920
921    def get_azimuth(self, row=-1):
922        """\
923        Get a list of azimuths for the observations.
924        Return a float for each integration in the scantable.
925
926        Parameters:
927            row:    row no of integration. Default -1 return all rows.
928
929        """
930        return self._get_column(self._getazimuth, row)
931
932    def get_parangle(self, row=-1):
933        """\
934        Get a list of parallactic angles for the observations.
935        Return a float for each integration in the scantable.
936
937        Parameters:
938
939            row:    row no of integration. Default -1 return all rows.
940
941        """
942        return self._get_column(self._getparangle, row)
943
944    def get_direction(self, row=-1):
945        """
946        Get a list of Positions on the sky (direction) for the observations.
947        Return a string for each integration in the scantable.
948
949        Parameters:
950
951            row:    row no of integration. Default -1 return all rows
952
953        """
954        return self._get_column(self._getdirection, row)
955
956    def get_directionval(self, row=-1):
957        """\
958        Get a list of Positions on the sky (direction) for the observations.
959        Return a float for each integration in the scantable.
960
961        Parameters:
962
963            row:    row no of integration. Default -1 return all rows
964
965        """
966        return self._get_column(self._getdirectionvec, row)
967
968    @asaplog_post_dec
969    def set_unit(self, unit='channel'):
970        """\
971        Set the unit for all following operations on this scantable
972
973        Parameters:
974
975            unit:    optional unit, default is 'channel'. Use one of '*Hz',
976                     'km/s', 'channel' or equivalent ''
977
978        """
979        varlist = vars()
980        if unit in ['', 'pixel', 'channel']:
981            unit = ''
982        inf = list(self._getcoordinfo())
983        inf[0] = unit
984        self._setcoordinfo(inf)
985        self._add_history("set_unit", varlist)
986
987    @asaplog_post_dec
988    def set_instrument(self, instr):
989        """\
990        Set the instrument for subsequent processing.
991
992        Parameters:
993
994            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
995                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
996
997        """
998        self._setInstrument(instr)
999        self._add_history("set_instrument", vars())
1000
1001    @asaplog_post_dec
1002    def set_feedtype(self, feedtype):
1003        """\
1004        Overwrite the feed type, which might not be set correctly.
1005
1006        Parameters:
1007
1008            feedtype:     'linear' or 'circular'
1009
1010        """
1011        self._setfeedtype(feedtype)
1012        self._add_history("set_feedtype", vars())
1013
1014    @asaplog_post_dec
1015    def set_doppler(self, doppler='RADIO'):
1016        """\
1017        Set the doppler for all following operations on this scantable.
1018
1019        Parameters:
1020
1021            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
1022
1023        """
1024        varlist = vars()
1025        inf = list(self._getcoordinfo())
1026        inf[2] = doppler
1027        self._setcoordinfo(inf)
1028        self._add_history("set_doppler", vars())
1029
1030    @asaplog_post_dec
1031    def set_freqframe(self, frame=None):
1032        """\
1033        Set the frame type of the Spectral Axis.
1034
1035        Parameters:
1036
1037            frame:   an optional frame type, default 'LSRK'. Valid frames are:
1038                     'TOPO', 'LSRD', 'LSRK', 'BARY',
1039                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
1040
1041        Example::
1042
1043            scan.set_freqframe('BARY')
1044
1045        """
1046        frame = frame or rcParams['scantable.freqframe']
1047        varlist = vars()
1048        # "REST" is not implemented in casacore
1049        #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
1050        #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
1051        valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
1052                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
1053
1054        if frame in valid:
1055            inf = list(self._getcoordinfo())
1056            inf[1] = frame
1057            self._setcoordinfo(inf)
1058            self._add_history("set_freqframe", varlist)
1059        else:
1060            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
1061            raise TypeError(msg)
1062
1063    @asaplog_post_dec
1064    def set_dirframe(self, frame=""):
1065        """\
1066        Set the frame type of the Direction on the sky.
1067
1068        Parameters:
1069
1070            frame:   an optional frame type, default ''. Valid frames are:
1071                     'J2000', 'B1950', 'GALACTIC'
1072
1073        Example::
1074
1075            scan.set_dirframe('GALACTIC')
1076
1077        """
1078        varlist = vars()
1079        Scantable.set_dirframe(self, frame)
1080        self._add_history("set_dirframe", varlist)
1081
1082    def get_unit(self):
1083        """\
1084        Get the default unit set in this scantable
1085
1086        Returns:
1087
1088            A unit string
1089
1090        """
1091        inf = self._getcoordinfo()
1092        unit = inf[0]
1093        if unit == '': unit = 'channel'
1094        return unit
1095
1096    @asaplog_post_dec
1097    def get_abcissa(self, rowno=0):
1098        """\
1099        Get the abcissa in the current coordinate setup for the currently
1100        selected Beam/IF/Pol
1101
1102        Parameters:
1103
1104            rowno:    an optional row number in the scantable. Default is the
1105                      first row, i.e. rowno=0
1106
1107        Returns:
1108
1109            The abcissa values and the format string (as a dictionary)
1110
1111        """
1112        abc = self._getabcissa(rowno)
1113        lbl = self._getabcissalabel(rowno)
1114        return abc, lbl
1115
1116    @asaplog_post_dec
1117    def flag(self, row=-1, mask=None, unflag=False):
1118        """\
1119        Flag the selected data using an optional channel mask.
1120
1121        Parameters:
1122
1123            row:    an optional row number in the scantable.
1124                      Default -1 flags all rows
1125                     
1126            mask:   an optional channel mask, created with create_mask. Default
1127                    (no mask) is all channels.
1128
1129            unflag:    if True, unflag the data
1130
1131        """
1132        varlist = vars()
1133        mask = mask or []
1134        self._flag(row, mask, unflag)
1135        self._add_history("flag", varlist)
1136
1137    @asaplog_post_dec
1138    def flag_row(self, rows=[], unflag=False):
1139        """\
1140        Flag the selected data in row-based manner.
1141
1142        Parameters:
1143
1144            rows:   list of row numbers to be flagged. Default is no row
1145                    (must be explicitly specified to execute row-based flagging).
1146
1147            unflag: if True, unflag the data.
1148
1149        """
1150        varlist = vars()
1151        self._flag_row(rows, unflag)
1152        self._add_history("flag_row", varlist)
1153
1154    @asaplog_post_dec
1155    def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
1156        """\
1157        Flag the selected data outside a specified range (channel-based).
1158
1159        Parameters:
1160
1161            uthres:      upper threshold.
1162
1163            dthres:      lower threshold
1164
1165            clipoutside: True for flagging data outside the range [dthres:uthres].
1166                         False for flagging data inside the range.
1167
1168            unflag:      if True, unflag the data.
1169
1170        """
1171        varlist = vars()
1172        self._clip(uthres, dthres, clipoutside, unflag)
1173        self._add_history("clip", varlist)
1174
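    # Illustrative flagging sketch (editor's addition; channel ranges and thresholds
    # are placeholders): a channel mask from create_mask() can drive flag(), while
    # clip() flags on value thresholds as described above.
    #     msk = scan.create_mask([0, 100])        # channels 0-100 (current unit)
    #     scan.flag(mask=msk)                     # flag those channels in all rows
    #     scan.clip(uthres=5.0, dthres=-1.0)      # flag data outside [-1.0, 5.0]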
1175    @asaplog_post_dec
1176    def lag_flag(self, start, end, unit="MHz", insitu=None):
1177        """\
1178        Flag the data in 'lag' space by providing a frequency to remove.
1179        Flagged data in the scantable get interpolated over the region.
1180        No taper is applied.
1181
1182        Parameters:
1183
1184            start:    the start frequency (really a period within the
1185                      bandwidth)  or period to remove
1186
1187            end:      the end frequency or period to remove
1188
1189            unit:     the frequency unit (default "MHz") or "" for
1190                      explicit lag channels
1191
1192        *Notes*:
1193
1194            It is recommended to flag edges of the band or strong
1195            signals beforehand.
1196
1197        """
1198        if insitu is None: insitu = rcParams['insitu']
1199        self._math._setinsitu(insitu)
1200        varlist = vars()
1201        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
1202        if not (unit == "" or base.has_key(unit)):
1203            raise ValueError("%s is not a valid unit." % unit)
1204        if unit == "":
1205            s = scantable(self._math._lag_flag(self, start, end, "lags"))
1206        else:
1207            s = scantable(self._math._lag_flag(self, start*base[unit],
1208                                               end*base[unit], "frequency"))
1209        s._add_history("lag_flag", varlist)
1210        if insitu:
1211            self._assign(s)
1212        else:
1213            return s
1214
1215    @asaplog_post_dec
1216    def fft(self, rowno=[], mask=[], getrealimag=False):
1217        """\
1218        Apply FFT to the spectra.
1219        Flagged data in the scantable get interpolated over the region.
1220
1221        Parameters:
1222
1223            rowno:          The row number(s) to be processed. int, list
1224                            and tuple are accepted. By default ([]), FFT
1225                            is applied to the whole data.
1226
1227            mask:           Auxiliary channel mask(s). Given as a boolean
1228                            list, it is applied to all specified rows.
1229                            A list of boolean lists can also be used to
1230                            apply different masks. In the latter case, the
1231                            length of 'mask' must be the same as that of
1232                            'rowno'. The default is [].
1233       
1234            getrealimag:    If True, returns the real and imaginary part
1235                            values of the complex results.
1236                            If False (the default), returns the amplitude
1237                            (absolute value) normalised with Ndata/2 and
1238                            phase (argument, in unit of radian).
1239
1240        Returns:
1241
1242            A list of dictionaries containing the results for each spectrum.
1243            Each dictionary contains two values, the real and the imaginary
1244            parts when getrealimag = True, or the amplitude(absolute value)
1245            and the phase(argument) when getrealimag = False. The key for
1246            these values are 'real' and 'imag', or 'ampl' and 'phase',
1247            respectively.
1248        """
1249        if isinstance(rowno, int):
1250            rowno = [rowno]
1251        elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
1252            raise TypeError("The row number(s) must be int, list or tuple.")
1253
1254        if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
1255
1256        if not (isinstance(mask, list) or isinstance(mask, tuple)):
1257            raise TypeError("The mask must be a boolean list or a list of boolean lists.")
1258        if len(mask) == 0: mask = [True for i in xrange(self.nchan())]
1259        if isinstance(mask[0], bool): mask = [mask]
1260        elif not (isinstance(mask[0], list) or isinstance(mask[0], tuple)):
1261            raise TypeError("The mask must be a boolean list or a list of boolean lists.")
1262
1263        usecommonmask = (len(mask) == 1)
1264        if not usecommonmask:
1265            if len(mask) != len(rowno):
1266                raise ValueError("When specifying masks for each spectrum, the number of masks must equal the number of rows.")
1267        for amask in mask:
1268            if len(amask) != self.nchan():
1269                raise ValueError("The spectra and the mask have different length.")
1270       
1271        res = []
1272
1273        imask = 0
1274        for whichrow in rowno:
1275            fspec = self._fft(whichrow, mask[imask], getrealimag)
1276            nspec = len(fspec)
1277           
1278            i=0
1279            v1=[]
1280            v2=[]
1281            reselem = {"real":[],"imag":[]} if getrealimag else {"ampl":[],"phase":[]}
1282           
1283            while (i < nspec):
1284                v1.append(fspec[i])
1285                v2.append(fspec[i+1])
1286                i+=2
1287           
1288            if getrealimag:
1289                reselem["real"]  += v1
1290                reselem["imag"]  += v2
1291            else:
1292                reselem["ampl"]  += v1
1293                reselem["phase"] += v2
1294           
1295            res.append(reselem)
1296           
1297            if not usecommonmask: imask += 1
1298       
1299        return res
1300
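    # Illustrative usage (editor's sketch; the row number is a placeholder): with
    # the default getrealimag=False each result dict holds the normalised amplitude
    # and the phase in radians, per the fft() docstring above.
    #     res = scan.fft(rowno=0)
    #     ampl, phase = res[0]['ampl'], res[0]['phase']
    #     reim = scan.fft(rowno=0, getrealimag=True)   # 'real'/'imag' instead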
1301    @asaplog_post_dec
1302    def create_mask(self, *args, **kwargs):
1303        """\
1304        Compute and return a mask based on [min, max] windows.
1305        The specified windows are to be INCLUDED, when the mask is
1306        applied.
1307
1308        Parameters:
1309
1310            [min, max], [min2, max2], ...
1311                Pairs of start/end points (inclusive) specifying the regions
1312                to be masked
1313
1314            invert:     optional argument. If specified as True,
1315                        return an inverted mask, i.e. the regions
1316                        specified are EXCLUDED
1317
1318            row:        create the mask using the specified row for
1319                        unit conversions, default is row=0
1320                        only necessary if frequency varies over rows.
1321
1322        Examples::
1323
1324            scan.set_unit('channel')
1325            # a)
1326            msk = scan.create_mask([400, 500], [800, 900])
1327            # masks everything outside 400 and 500
1328            # and 800 and 900 in the unit 'channel'
1329
1330            # b)
1331            msk = scan.create_mask([400, 500], [800, 900], invert=True)
1332            # masks the regions between 400 and 500
1333            # and 800 and 900 in the unit 'channel'
1334
1335            # c)
1336            #mask only channel 400
1337            msk =  scan.create_mask([400])
1338
1339        """
1340        row = kwargs.get("row", 0)
1341        data = self._getabcissa(row)
1342        u = self._getcoordinfo()[0]
1343        if u == "":
1344            u = "channel"
1345        msg = "The current mask window unit is %s" % u
1346        i = self._check_ifs()
1347        if not i:
1348            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1349        asaplog.push(msg)
1350        n = self.nchan()
1351        msk = _n_bools(n, False)
1352        # test if args is a 'list' or a 'normal *args - UGLY!!!
1353
1354        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
1355             and args or args[0]
1356        for window in ws:
1357            if len(window) == 1:
1358                window = [window[0], window[0]]
1359            if len(window) == 0 or  len(window) > 2:
1360                raise ValueError("A window needs to be defined as [start(, end)]")
1361            if window[0] > window[1]:
1362                tmp = window[0]
1363                window[0] = window[1]
1364                window[1] = tmp
1365            for i in range(n):
1366                if data[i] >= window[0] and data[i] <= window[1]:
1367                    msk[i] = True
1368        if kwargs.has_key('invert'):
1369            if kwargs.get('invert'):
1370                msk = mask_not(msk)
1371        return msk
1372
1373    def get_masklist(self, mask=None, row=0, silent=False):
1374        """\
1375        Compute and return a list of mask windows, [min, max].
1376
1377        Parameters:
1378
1379            mask:       channel mask, created with create_mask.
1380
1381            row:        calculate the masklist using the specified row
1382                        for unit conversions, default is row=0
1383                        only necessary if frequency varies over rows.
1384
1385        Returns:
1386
1387            [min, max], [min2, max2], ...
1388                Pairs of start/end points (inclusive) specifying
1389                the masked regions
1390
1391        """
1392        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1393            raise TypeError("The mask should be list or tuple.")
1394        if len(mask) < 2:
1395            raise TypeError("The mask should contain at least 2 elements.")
1396        if self.nchan() != len(mask):
1397            msg = "Number of channels in scantable != number of mask elements"
1398            raise TypeError(msg)
1399        data = self._getabcissa(row)
1400        u = self._getcoordinfo()[0]
1401        if u == "":
1402            u = "channel"
1403        msg = "The current mask window unit is %s" % u
1404        i = self._check_ifs()
1405        if not i:
1406            msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
1407        if not silent:
1408            asaplog.push(msg)
1409        masklist=[]
1410        ist, ien = None, None
1411        ist, ien=self.get_mask_indices(mask)
1412        if ist is not None and ien is not None:
1413            for i in xrange(len(ist)):
1414                range=[data[ist[i]],data[ien[i]]]
1415                range.sort()
1416                masklist.append([range[0],range[1]])
1417        return masklist
1418
1419    def get_mask_indices(self, mask=None):
1420        """\
1421        Compute and return lists of mask start indices and mask end indices.
1422
1423        Parameters:
1424
1425            mask:       channel mask, created with create_mask.
1426
1427        Returns:
1428
1429            List of mask start indices and that of mask end indices,
1430            i.e., [istart1,istart2,....], [iend1,iend2,....].
1431
1432        """
1433        if not (isinstance(mask,list) or isinstance(mask, tuple)):
1434            raise TypeError("The mask should be list or tuple.")
1435        if len(mask) < 2:
1436            raise TypeError("The mask should contain at least 2 elements.")
1437        istart=[]
1438        iend=[]
1439        if mask[0]: istart.append(0)
1440        for i in range(len(mask)-1):
1441            if not mask[i] and mask[i+1]:
1442                istart.append(i+1)
1443            elif mask[i] and not mask[i+1]:
1444                iend.append(i)
1445        if mask[len(mask)-1]: iend.append(len(mask)-1)
1446        if len(istart) != len(iend):
1447            raise RuntimeError("Numbers of mask start != mask end.")
1448        for i in range(len(istart)):
1449            if istart[i] > iend[i]:
1450                raise RuntimeError("Mask start index > mask end index")
1451                break
1452        return istart,iend
1453
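    # Illustrative round trip (editor's sketch; window values are placeholders):
    # a mask built with create_mask() can be mapped back to index ranges or to
    # [min, max] windows in the current unit.
    #     msk = scan.create_mask([100, 200], [500, 600])
    #     ist, ien = scan.get_mask_indices(msk)   # e.g. ([100, 500], [200, 600])
    #     wins = scan.get_masklist(msk)           # [[min1, max1], [min2, max2], ...]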
1454    @asaplog_post_dec
1455    def parse_maskexpr(self,maskstring):
1456        """
1457        Parse CASA type mask selection syntax (IF dependent).
1458
1459        Parameters:
1460            maskstring : A string mask selection expression.
1461                         Comma-separated selections specify different IF-
1462                         channel combinations. IFs and channel selections
1463                         are separated by a colon, ':'.
1464                     examples:
1465                         ''          = all IFs (all channels)
1466                         '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
1467                         '3:3~45;60' = channels 3 to 45 and 60 in IF 3
1468                         '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
1469                                       all channels in IF8
1470        Returns:
1471        A dictionary of selected (valid) IF and masklist pairs,
1472        e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
1473        """
1474        if not isinstance(maskstring,str):
1475            asaplog.post()
1476            asaplog.push("Invalid mask expression")
1477            asaplog.post("ERROR")
1478       
1479        valid_ifs = self.getifnos()
1480        frequnit = self.get_unit()
1481        seldict = {}
1482        if maskstring == "":
1483            maskstring = str(valid_ifs)[1:-1]
1484        ## split each selection
1485        sellist = maskstring.split(',')
1486        for currselstr in sellist:
1487            selset = currselstr.split(':')
1488            # spw and mask string (may include ~, < or >)
1489            spwmasklist = self._parse_selection(selset[0],typestr='integer',
1490                                               offset=1,minval=min(valid_ifs),
1491                                               maxval=max(valid_ifs))
1492            for spwlist in spwmasklist:
1493                selspws = []
1494                for ispw in range(spwlist[0],spwlist[1]+1):
1495                    # Put into the list only if ispw exists
1496                    if valid_ifs.count(ispw):
1497                        selspws.append(ispw)
1498            del spwmasklist, spwlist
1499
1500            # parse frequency mask list
1501            if len(selset) > 1:
1502                freqmasklist = self._parse_selection(selset[1],typestr='float',
1503                                                    offset=0.)
1504            else:
1505                # want to select the whole spectrum
1506                freqmasklist = [None]
1507
1508            ## define a dictionary of spw - masklist combination
1509            for ispw in selspws:
1510                #print "working on", ispw
1511                spwstr = str(ispw)
1512                if len(selspws) == 0:
1513                    # empty spw
1514                    continue
1515                else:
1516                    ## want to get min and max of the spw and
1517                    ## offset to set for '<' and '>'
1518                    if frequnit == 'channel':
1519                        minfreq = 0
1520                        maxfreq = self.nchan(ifno=ispw)
1521                        offset = 0.5
1522                    else:
1523                        ## This is ugly part. need improvement
1524                        for ifrow in xrange(self.nrow()):
1525                            if self.getif(ifrow) == ispw:
1526                                #print "IF",ispw,"found in row =",ifrow
1527                                break
1528                        freqcoord = self.get_coordinate(ifrow)
1529                        freqs = self._getabcissa(ifrow)
1530                        minfreq = min(freqs)
1531                        maxfreq = max(freqs)
1532                        if len(freqs) == 1:
1533                            offset = 0.5
1534                        elif frequnit.find('Hz') > 0:
1535                            offset = abs(freqcoord.to_frequency(1,unit=frequnit)
1536                                      -freqcoord.to_frequency(0,unit=frequnit))*0.5
1537                        elif frequnit.find('m/s') > 0:
1538                            offset = abs(freqcoord.to_velocity(1,unit=frequnit)
1539                                      -freqcoord.to_velocity(0,unit=frequnit))*0.5
1540                        else:
1541                            asaplog.post()
1542                            asaplog.push("Invalid frequency unit")
1543                            asaplog.post("ERROR")
1544                        del freqs, freqcoord, ifrow
1545                    for freq in freqmasklist:
1546                        selmask = freq or [minfreq, maxfreq]
1547                        if selmask[0] == None:
1548                            ## selection was "<freq[1]".
1549                            if selmask[1] < minfreq:
1550                                ## avoid adding region selection
1551                                selmask = None
1552                            else:
1553                                selmask = [minfreq,selmask[1]-offset]
1554                        elif selmask[1] == None:
1555                            ## selection was ">freq[0]"
1556                            if selmask[0] > maxfreq:
1557                                ## avoid adding region selection
1558                                selmask = None
1559                            else:
1560                                selmask = [selmask[0]+offset,maxfreq]
1561                        if selmask:
1562                            if not seldict.has_key(spwstr):
1563                                # new spw selection
1564                                seldict[spwstr] = []
1565                            seldict[spwstr] += [selmask]
1566                    del minfreq,maxfreq,offset,freq,selmask
1567                del spwstr
1568            del freqmasklist
1569        del valid_ifs
1570        if len(seldict) == 0:
1571            asaplog.post()
1572            asaplog.push("No valid selection in the mask expression: "+maskstring)
1573            asaplog.post("WARN")
1574            return None
1575        msg = "Selected masklist:\n"
1576        for sif, lmask in seldict.iteritems():
1577            msg += "   IF"+sif+" - "+str(lmask)+"\n"
1578        asaplog.push(msg)
1579        return seldict
1580
1581    def _parse_selection(self,selstr,typestr='float',offset=0.,minval=None,maxval=None):
1582        """
1583        Parameters:
1584            selstr :    The Selection string, e.g., '<3;5~7;100~103;9'
1585            typestr :   The type of the values in returned list
1586                        ('integer' or 'float')
1587            offset :    The offset value to subtract from or add to
1588                        the boundary value if the selection string
1589                        includes '<' or '>'
1590            minval, maxval :  The minimum/maximum values to set if the
1591                              selection string includes '<' or '>'.
1592                              The list element is filled with None by default.
1593        Returns:
1594            A list of min/max pair of selections.
1595        Example:
1596            _parse_selection('<3;5~7;9',typestr='int',offset=1,minval=0)
1597            returns [[0,2],[5,7],[9,9]]
1598        """
1599        selgroups = selstr.split(';')
1600        sellists = []
1601        if typestr.lower().startswith('int'):
1602            formatfunc = int
1603        else:
1604            formatfunc = float
1605       
1606        for currsel in  selgroups:
1607            if currsel.find('~') > 0:
1608                minsel = formatfunc(currsel.split('~')[0].strip())
1609                maxsel = formatfunc(currsel.split('~')[1].strip())
1610            elif currsel.strip().startswith('<'):
1611                minsel = minval
1612                maxsel = formatfunc(currsel.split('<')[1].strip()) \
1613                         - formatfunc(offset)
1614            elif currsel.strip().startswith('>'):
1615                minsel = formatfunc(currsel.split('>')[1].strip()) \
1616                         + formatfunc(offset)
1617                maxsel = maxval
1618            else:
1619                minsel = formatfunc(currsel)
1620                maxsel = formatfunc(currsel)
1621            sellists.append([minsel,maxsel])
1622        return sellists
1623
1624#    def get_restfreqs(self):
1625#        """
1626#        Get the restfrequency(s) stored in this scantable.
1627#        The return value(s) are always of unit 'Hz'
1628#        Parameters:
1629#            none
1630#        Returns:
1631#            a list of doubles
1632#        """
1633#        return list(self._getrestfreqs())
1634
1635    def get_restfreqs(self, ids=None):
1636        """\
1637        Get the restfrequency(s) stored in this scantable.
1638        The return value(s) are always of unit 'Hz'
1639
1640        Parameters:
1641
1642            ids: (optional) a list of MOLECULE_ID values for which the
1643                 restfrequency(s) are to be retrieved
1644
1645        Returns:
1646
1647            dictionary containing ids and a list of doubles for each id
1648
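        Example (assuming your scantable is called scan)::

            # get all rest frequencies, keyed by MOLECULE_ID
            rfdict = scan.get_restfreqs()
            # get the rest frequency(s) for MOLECULE_ID 0 only
            rf0 = scan.get_restfreqs(ids=0)
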
1649        """
1650        if ids is None:
1651            rfreqs={}
1652            idlist = self.getmolnos()
1653            for i in idlist:
1654                rfreqs[i]=list(self._getrestfreqs(i))
1655            return rfreqs
1656        else:
1657            if type(ids)==list or type(ids)==tuple:
1658                rfreqs={}
1659                for i in ids:
1660                    rfreqs[i]=list(self._getrestfreqs(i))
1661                return rfreqs
1662            else:
1663                return list(self._getrestfreqs(ids))
1664            #return list(self._getrestfreqs(ids))
1665
1666    def set_restfreqs(self, freqs=None, unit='Hz'):
1667        """\
1668        Set or replace the rest frequency.
1669        If the 'freqs' argument holds a scalar,
1670        then that rest frequency will be applied to all the selected
1671        data.  If the 'freqs' argument holds
1672        a vector, then it MUST be of equal or smaller length than
1673        the number of IFs (and the available restfrequencies will be
1674        replaced by this vector).  In this case, *all* data have
1675        the restfrequency set per IF according
1676        to the corresponding value you give in the 'freqs' vector.
1677        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
1678        IF 1 gets restfreq 2e9.
1679
1680        You can also specify the frequencies via a linecatalog.
1681
1682        Parameters:
1683
1684            freqs:   list of rest frequency values or string identifiers
1685
1686            unit:    unit for rest frequency (default 'Hz')
1687
1688
1689        Example::
1690
1691            # set the given restfrequency for all currently selected IFs
1692            scan.set_restfreqs(freqs=1.4e9)
1693            # set restfrequencies for the n IFs (n > 1) in the order of the
1694            # list, i.e.
1695            # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
1696            # len(list_of_restfreqs) == nIF
1697            # for nIF == 1 the following will set multiple restfrequencies for
1698            # that IF
1699            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1700            # set multiple restfrequencies per IF, as a list of lists where
1701            # the outer list has nIF elements and the inner ones are arbitrary
1702            scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
1703
1704        *Note*:
1705
1706            To do more sophisticated rest frequency setting, e.g. on a
1707            source and IF basis, use scantable.set_selection() before using
1708            this function::
1709
1710                # provided your scantable is called scan
1711                selection = selector()
1712                selection.set_name("ORION*")
1713                selection.set_ifs([1])
1714                scan.set_selection(selection)
1715                scan.set_restfreqs(freqs=86.6e9)
1716
1717        """
1718        varlist = vars()
1719        from asap import linecatalog
1720        # simple  value
1721        if isinstance(freqs, int) or isinstance(freqs, float):
1722            self._setrestfreqs([freqs], [""], unit)
1723        # list of values
1724        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1725            # list values are scalars
1726            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1727                if len(freqs) == 1:
1728                    self._setrestfreqs(freqs, [""], unit)
1729                else:
1730                    # allow the 'old' mode of setting mulitple IFs
1731                    sel = selector()
1732                    savesel = self._getselection()
1733                    iflist = self.getifnos()
1734                    if len(freqs)>len(iflist):
1735                        raise ValueError("number of elements in list "
1736                                         "exceeds the current IF selections")
1738                    for i, fval in enumerate(freqs):
1739                        sel.set_ifs(iflist[i])
1740                        self._setselection(sel)
1741                        self._setrestfreqs([fval], [""], unit)
1742                    self._setselection(savesel)
1743
1744            # list values are dict, {'value'=, 'name'=)
1745            elif isinstance(freqs[-1], dict):
1746                values = []
1747                names = []
1748                for d in freqs:
1749                    values.append(d["value"])
1750                    names.append(d["name"])
1751                self._setrestfreqs(values, names, unit)
1752            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1753                sel = selector()
1754                savesel = self._getselection()
1755                iflist = self.getifnos()
1756                if len(freqs)>len(iflist):
1757                        raise ValueError("number of elements in list of list exceeds"
1758                                     " the current IF selections")
1759                for i, fval in enumerate(freqs):
1760                    sel.set_ifs(iflist[i])
1761                    self._setselection(sel)
1762                    self._setrestfreqs(fval, [""], unit)
1763                self._setselection(savesel)
1764        # freqs are to be taken from a linecatalog
1765        elif isinstance(freqs, linecatalog):
1766            sel = selector()
1767            savesel = self._getselection()
            # map linecatalog rows onto the currently available IF numbers
            iflist = self.getifnos()
1768            for i in xrange(freqs.nrow()):
1769                sel.set_ifs(iflist[i])
1770                self._setselection(sel)
1771                self._setrestfreqs([freqs.get_frequency(i)],
1772                                   [freqs.get_name(i)], "MHz")
1773                # ensure that we are not iterating past nIF
1774                if i == self.nif()-1: break
1775            self._setselection(savesel)
1776        else:
1777            return
1778        self._add_history("set_restfreqs", varlist)
1779
1780    def shift_refpix(self, delta):
1781        """\
1782        Shift the reference pixel of the Spectra Coordinate by an
1783        integer amount.
1784
1785        Parameters:
1786
1787            delta:   the amount to shift by
1788
1789        *Note*:
1790
1791            Be careful using this with broadband data.
1792
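        Example (a minimal sketch)::

            # shift the reference pixel by two channels
            scan.shift_refpix(2)
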
1793        """
1794        Scantable.shift_refpix(self, delta)
1795
1796    @asaplog_post_dec
1797    def history(self, filename=None):
1798        """\
1799        Print the history. Optionally to a file.
1800
1801        Parameters:
1802
1803            filename:    The name of the file to save the history to.
1804
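        Example (assuming your scantable is called scan)::

            scan.history()                          # print to screen/pager
            scan.history('scantable_history.txt')   # save to a text file
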
1805        """
1806        hist = list(self._gethistory())
1807        out = "-"*80
1808        for h in hist:
1809            if h.startswith("---"):
1810                out = "\n".join([out, h])
1811            else:
1812                items = h.split("##")
1813                date = items[0]
1814                func = items[1]
1815                items = items[2:]
1816                out += "\n"+date+"\n"
1817                out += "Function: %s\n  Parameters:" % (func)
1818                for i in items:
1819                    if i == '':
1820                        continue
1821                    s = i.split("=")
1822                    out += "\n   %s = %s" % (s[0], s[1])
1823                out = "\n".join([out, "-"*80])
1824        if filename is not None:
1825            if filename == "":
1826                filename = 'scantable_history.txt'
1827            import os
1828            filename = os.path.expandvars(os.path.expanduser(filename))
1829            if not os.path.isdir(filename):
1830                data = open(filename, 'w')
1831                data.write(out)
1832                data.close()
1833            else:
1834                msg = "Illegal file name '%s'." % (filename)
1835                raise IOError(msg)
1836        return page(out)
1837    #
1838    # Maths business
1839    #
1840    @asaplog_post_dec
1841    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1842        """\
1843        Return the (time) weighted average of a scan.
1844
1845        *Note*:
1846
1847            in channels only - align if necessary
1848
1849        Parameters:
1850
1851            mask:     an optional mask (only used for 'var' and 'tsys'
1852                      weighting)
1853
1854            scanav:   True averages each scan separately
1855                      False (default) averages all scans together.
1856
1857            weight:   Weighting scheme.
1858                      'none'     (mean no weight)
1859                      'var'      (1/var(spec) weighted)
1860                      'tsys'     (1/Tsys**2 weighted)
1861                      'tint'     (integration time weighted)
1862                      'tintsys'  (Tint/Tsys**2)
1863                      'median'   (median averaging)
1864                      The default is 'tint'
1865
1866            align:    align the spectra in velocity before averaging. It takes
1867                      the time of the first spectrum as reference time.
1868
1869        Example::
1870
1871            # time average the scantable without using a mask
1872            newscan = scan.average_time()
1873
1874        """
1875        varlist = vars()
1876        weight = weight or 'TINT'
1877        mask = mask or ()
1878        scanav = (scanav and 'SCAN') or 'NONE'
1879        scan = (self, )
1880
1881        if align:
1882            scan = (self.freq_align(insitu=False), )
1883        s = None
1884        if weight.upper() == 'MEDIAN':
1885            s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1886                                                     scanav))
1887        else:
1888            s = scantable(self._math._average(scan, mask, weight.upper(),
1889                          scanav))
1890        s._add_history("average_time", varlist)
1891        return s
1892
1893    @asaplog_post_dec
1894    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1895        """\
1896        Return a scan where all spectra are converted to either
1897        Jansky or Kelvin depending upon the flux units of the scan table.
1898        By default the function tries to look the values up internally.
1899        If it can't find them (or if you want to over-ride), you must
1900        specify EITHER jyperk OR eta (and d, which it will also try to look up
1901        if you don't set it). jyperk takes precedence if you set both.
1902
1903        Parameters:
1904
1905            jyperk:      the Jy / K conversion factor
1906
1907            eta:         the aperture efficiency
1908
1909            d:           the geometric diameter (metres)
1910
1911            insitu:      if False a new scantable is returned.
1912                         Otherwise, the scaling is done in-situ
1913                         The default is taken from .asaprc (False)
1914
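        Example (a minimal sketch; the numerical values are arbitrary)::

            # convert using an explicit Jy/K factor
            converted = scan.convert_flux(jyperk=10.5, insitu=False)
            # or derive the factor from an aperture efficiency and diameter
            converted = scan.convert_flux(eta=0.7, d=35.0, insitu=False)
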
1915        """
1916        if insitu is None: insitu = rcParams['insitu']
1917        self._math._setinsitu(insitu)
1918        varlist = vars()
1919        jyperk = jyperk or -1.0
1920        d = d or -1.0
1921        eta = eta or -1.0
1922        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1923        s._add_history("convert_flux", varlist)
1924        if insitu: self._assign(s)
1925        else: return s
1926
1927    @asaplog_post_dec
1928    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1929        """\
1930        Return a scan after applying a gain-elevation correction.
1931        The correction can be made via either a polynomial or a
1932        table-based interpolation (and extrapolation if necessary).
1933        You specify polynomial coefficients, an ascii table or neither.
1934        If you specify neither, then a polynomial correction will be made
1935        with built-in coefficients known for certain telescopes (an error
1936        will occur if the instrument is not known).
1937        The data and Tsys are *divided* by the scaling factors.
1938
1939        Parameters:
1940
1941            poly:        Polynomial coefficients (default None) to compute a
1942                         gain-elevation correction as a function of
1943                         elevation (in degrees).
1944
1945            filename:    The name of an ascii file holding correction factors.
1946                         The first row of the ascii file must give the column
1947                         names and these MUST include columns
1948                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1949                         by this) somewhere.
1950                         The second row must give the data type of the
1951                         column. Use 'R' for Real and 'I' for Integer.
1952                         An example file would be
1953                         (actual factors are arbitrary) :
1954
1955                         TIME ELEVATION FACTOR
1956                         R R R
1957                         0.1 0 0.8
1958                         0.2 20 0.85
1959                         0.3 40 0.9
1960                         0.4 60 0.85
1961                         0.5 80 0.8
1962                         0.6 90 0.75
1963
1964            method:      Interpolation method when correcting from a table.
1965                         Values are  "nearest", "linear" (default), "cubic"
1966                         and "spline"
1967
1968            insitu:      if False a new scantable is returned.
1969                         Otherwise, the scaling is done in-situ
1970                         The default is taken from .asaprc (False)
1971
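        Example (a sketch; 'gainel.txt' is a hypothetical file in the
        format shown above)::

            corrected = scan.gain_el(filename='gainel.txt', method='linear',
                                     insitu=False)
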
1972        """
1973
1974        if insitu is None: insitu = rcParams['insitu']
1975        self._math._setinsitu(insitu)
1976        varlist = vars()
1977        poly = poly or ()
1978        from os.path import expandvars
1979        filename = expandvars(filename)
1980        s = scantable(self._math._gainel(self, poly, filename, method))
1981        s._add_history("gain_el", varlist)
1982        if insitu:
1983            self._assign(s)
1984        else:
1985            return s
1986
1987    @asaplog_post_dec
1988    def freq_align(self, reftime=None, method='cubic', insitu=None):
1989        """\
1990        Return a scan where all rows have been aligned in frequency/velocity.
1991        The alignment frequency frame (e.g. LSRK) is that set by function
1992        set_freqframe.
1993
1994        Parameters:
1995
1996            reftime:     reference time to align at. By default, the time of
1997                         the first row of data is used.
1998
1999            method:      Interpolation method for regridding the spectra.
2000                         Choose from "nearest", "linear", "cubic" (default)
2001                         and "spline"
2002
2003            insitu:      if False a new scantable is returned.
2004                         Otherwise, the scaling is done in-situ
2005                         The default is taken from .asaprc (False)
2006
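        Example::

            # align all rows at the time of the first row and return a copy
            aligned = scan.freq_align(insitu=False)
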
2007        """
2008        if insitu is None: insitu = rcParams["insitu"]
2009        self._math._setinsitu(insitu)
2010        varlist = vars()
2011        reftime = reftime or ""
2012        s = scantable(self._math._freq_align(self, reftime, method))
2013        s._add_history("freq_align", varlist)
2014        if insitu: self._assign(s)
2015        else: return s
2016
2017    @asaplog_post_dec
2018    def opacity(self, tau=None, insitu=None):
2019        """\
2020        Apply an opacity correction. The data
2021        and Tsys are multiplied by the correction factor.
2022
2023        Parameters:
2024
2025            tau:         (list of) opacity from which the correction factor is
2026                         exp(tau*ZD)
2027                         where ZD is the zenith-distance.
2028                         If a list is provided, it has to be of length nIF,
2029                         nIF*nPol or 1 and in order of IF/POL, e.g.
2030                         [opif0pol0, opif0pol1, opif1pol0 ...]
2031                         if tau is `None` the opacities are determined from a
2032                         model.
2033
2034            insitu:      if False a new scantable is returned.
2035                         Otherwise, the scaling is done in-situ
2036                         The default is taken from .asaprc (False)
2037
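        Example (a sketch; the opacity value is arbitrary)::

            # apply a single zenith opacity to all data
            corrected = scan.opacity(tau=0.09, insitu=False)
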
2038        """
2039        if insitu is None: insitu = rcParams['insitu']
2040        self._math._setinsitu(insitu)
2041        varlist = vars()
2042        if not hasattr(tau, "__len__"):
2043            tau = [tau]
2044        s = scantable(self._math._opacity(self, tau))
2045        s._add_history("opacity", varlist)
2046        if insitu: self._assign(s)
2047        else: return s
2048
2049    @asaplog_post_dec
2050    def bin(self, width=5, insitu=None):
2051        """\
2052        Return a scan where all spectra have been binned up.
2053
2054        Parameters:
2055
2056            width:       The bin width (default=5) in pixels
2057
2058            insitu:      if False a new scantable is returned.
2059                         Otherwise, the scaling is done in-situ
2060                         The default is taken from .asaprc (False)
2061
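        Example::

            # bin up by a factor of 10 and return a new scantable
            binned = scan.bin(width=10, insitu=False)
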
2062        """
2063        if insitu is None: insitu = rcParams['insitu']
2064        self._math._setinsitu(insitu)
2065        varlist = vars()
2066        s = scantable(self._math._bin(self, width))
2067        s._add_history("bin", varlist)
2068        if insitu:
2069            self._assign(s)
2070        else:
2071            return s
2072
2073    @asaplog_post_dec
2074    def resample(self, width=5, method='cubic', insitu=None):
2075        """\
2076        Return a scan where all spectra have been resampled by the given width.
2077
2078        Parameters:
2079
2080            width:       The bin width (default=5) in pixels
2081
2082            method:      Interpolation method used for the resampling.
2083                         Values are  "nearest", "linear", "cubic" (default)
2084                         and "spline"
2085
2086            insitu:      if False a new scantable is returned.
2087                         Otherwise, the scaling is done in-situ
2088                         The default is taken from .asaprc (False)
2089
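        Example (a sketch)::

            # resample with a width of 4 pixels using spline interpolation
            regridded = scan.resample(width=4, method='spline', insitu=False)
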
2090        """
2091        if insitu is None: insitu = rcParams['insitu']
2092        self._math._setinsitu(insitu)
2093        varlist = vars()
2094        s = scantable(self._math._resample(self, method, width))
2095        s._add_history("resample", varlist)
2096        if insitu: self._assign(s)
2097        else: return s
2098
2099    @asaplog_post_dec
2100    def average_pol(self, mask=None, weight='none'):
2101        """\
2102        Average the Polarisations together.
2103
2104        Parameters:
2105
2106            mask:        An optional mask defining the region, where the
2107                         averaging will be applied. The output will have all
2108                         specified points masked.
2109
2110            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2111                         weighted), or 'tsys' (1/Tsys**2 weighted)
2112
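        Example::

            # average the polarisations together without weighting
            polaveraged = scan.average_pol(weight='none')
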
2113        """
2114        varlist = vars()
2115        mask = mask or ()
2116        s = scantable(self._math._averagepol(self, mask, weight.upper()))
2117        s._add_history("average_pol", varlist)
2118        return s
2119
2120    @asaplog_post_dec
2121    def average_beam(self, mask=None, weight='none'):
2122        """\
2123        Average the Beams together.
2124
2125        Parameters:
2126            mask:        An optional mask defining the region, where the
2127                         averaging will be applied. The output will have all
2128                         specified points masked.
2129
2130            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
2131                         weighted), or 'tsys' (1/Tsys**2 weighted)
2132
2133        """
2134        varlist = vars()
2135        mask = mask or ()
2136        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
2137        s._add_history("average_beam", varlist)
2138        return s
2139
2140    def parallactify(self, pflag):
2141        """\
2142        Set a flag to indicate whether this data should be treated as having
2143        been 'parallactified' (total phase == 0.0)
2144
2145        Parameters:
2146
2147            pflag:  Bool indicating whether to turn this on (True) or
2148                    off (False)
2149
2150        """
2151        varlist = vars()
2152        self._parallactify(pflag)
2153        self._add_history("parallactify", varlist)
2154
2155    @asaplog_post_dec
2156    def convert_pol(self, poltype=None):
2157        """\
2158        Convert the data to a different polarisation type.
2159        Note that you will need cross-polarisation terms for most conversions.
2160
2161        Parameters:
2162
2163            poltype:    The new polarisation type. Valid types are:
2164                        "linear", "circular", "stokes" and "linpol"
2165
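        Example (requires the appropriate cross-polarisation terms in the data)::

            stokes = scan.convert_pol('stokes')
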
2166        """
2167        varlist = vars()
2168        s = scantable(self._math._convertpol(self, poltype))
2169        s._add_history("convert_pol", varlist)
2170        return s
2171
2172    @asaplog_post_dec
2173    def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, insitu=None):
2174        """\
2175        Smooth the spectrum by the specified kernel (conserving flux).
2176
2177        Parameters:
2178
2179            kernel:     The type of smoothing kernel. Select from
2180                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
2181                        or 'poly'
2182
2183            width:      The width of the kernel in pixels. For hanning this is
2184                        ignored, otherwise it defaults to 5 pixels.
2185                        For 'gaussian' it is the Full Width Half
2186                        Maximum. For 'boxcar' it is the full width.
2187                        For 'rmedian' and 'poly' it is the half width.
2188
2189            order:      Optional parameter for 'poly' kernel (default is 2), to
2190                        specify the order of the polynomial. Ignored by all other
2191                        kernels.
2192
2193            plot:       plot the original and the smoothed spectra.
2194                        Each individual fit has to be approved by
2195                        typing 'y' or 'n'
2196
2197            insitu:     if False a new scantable is returned.
2198                        Otherwise, the scaling is done in-situ
2199                        The default is taken from .asaprc (False)
2200
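        Example (a minimal sketch)::

            # boxcar smoothing over 5 channels, returning a new scantable
            smoothed = scan.smooth(kernel='boxcar', width=5, insitu=False)
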
2201        """
2202        if insitu is None: insitu = rcParams['insitu']
2203        self._math._setinsitu(insitu)
2204        varlist = vars()
2205
2206        if plot: orgscan = self.copy()
2207
2208        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
2209        s._add_history("smooth", varlist)
2210
2211        if plot:
2212            from asap.asapplotter import new_asaplot
2213            theplot = new_asaplot(rcParams['plotter.gui'])
2214            theplot.set_panels()
2215            ylab=s._get_ordinate_label()
2216            #theplot.palette(0,["#777777","red"])
2217            for r in xrange(s.nrow()):
2218                xsm=s._getabcissa(r)
2219                ysm=s._getspectrum(r)
2220                xorg=orgscan._getabcissa(r)
2221                yorg=orgscan._getspectrum(r)
2222                theplot.clear()
2223                theplot.hold()
2224                theplot.set_axes('ylabel',ylab)
2225                theplot.set_axes('xlabel',s._getabcissalabel(r))
2226                theplot.set_axes('title',s._getsourcename(r))
2227                theplot.set_line(label='Original',color="#777777")
2228                theplot.plot(xorg,yorg)
2229                theplot.set_line(label='Smoothed',color="red")
2230                theplot.plot(xsm,ysm)
2231                ### Ugly part for legend
2232                for i in [0,1]:
2233                    theplot.subplots[0]['lines'].append([theplot.subplots[0]['axes'].lines[i]])
2234                theplot.release()
2235                ### Ugly part for legend
2236                theplot.subplots[0]['lines']=[]
2237                res = raw_input("Accept smoothing ([y]/n): ")
2238                if res.upper() == 'N':
2239                    s._setspectrum(yorg, r)
2240            theplot.quit()
2241            del theplot
2242            del orgscan
2243
2244        if insitu: self._assign(s)
2245        else: return s
2246
2247    @asaplog_post_dec
2248    def _parse_wn(self, wn):
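        """\
        Convert a wave number specification into an explicit list of wave
        numbers: accepts an integer, a list or tuple of integers, or a
        string such as 'a-b', '<a', '<=a', '>a' or '>=a' (see the case
        comments below). Used to parse the addwn/rejwn parameters of
        *sinusoid_baseline().
        """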
2249        if isinstance(wn, list) or isinstance(wn, tuple):
2250            return wn
2251        elif isinstance(wn, int):
2252            return [ wn ]
2253        elif isinstance(wn, str):
2254            if '-' in wn:                            # case 'a-b' : return [a,a+1,...,b-1,b]
2255                val = wn.split('-')
2256                val = [int(val[0]), int(val[1])]
2257                val.sort()
2258                res = [i for i in xrange(val[0], val[1]+1)]
2259            elif wn[:2] == '<=' or wn[:2] == '=<':   # cases '<=a','=<a' : return [0,1,...,a-1,a]
2260                val = int(wn[2:])+1
2261                res = [i for i in xrange(val)]
2262            elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
2263                val = int(wn[:-2])+1
2264                res = [i for i in xrange(val)]
2265            elif wn[0] == '<':                       # case '<a' :         return [0,1,...,a-2,a-1]
2266                val = int(wn[1:])
2267                res = [i for i in xrange(val)]
2268            elif wn[-1] == '>':                      # case 'a>' :         return [0,1,...,a-2,a-1]
2269                val = int(wn[:-1])
2270                res = [i for i in xrange(val)]
2271            elif wn[:2] == '>=' or wn[:2] == '=>':   # cases '>=a','=>a' : return [a,a+1,...,a_nyq]
2272                val = int(wn[2:])
2273                res = [i for i in xrange(val, self.nchan()/2+1)]
2274            elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,a+1,...,a_nyq]
2275                val = int(wn[:-2])
2276                res = [i for i in xrange(val, self.nchan()/2+1)]
2277            elif wn[0] == '>':                       # case '>a' :         return [a+1,a+2,...,a_nyq]
2278                val = int(wn[1:])+1
2279                res = [i for i in xrange(val, self.nchan()/2+1)]
2280            elif wn[-1] == '<':                      # case 'a<' :         return [a+1,a+2,...,a_nyq]
2281                val = int(wn[:-1])+1
2282                res = [i for i in xrange(val, self.nchan()/2+1)]
            else:                                    # case 'a' (a bare number string) : return [a]
                res = [int(wn)]
2283
2284            return res
2285        else:
2286            msg = 'wrong value given for addwn/rejwn'
2287            raise RuntimeError(msg)
2288
2289
2290    @asaplog_post_dec
2291    def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2292                          addwn=None, rejwn=None, clipthresh=None, clipniter=None, plot=None,
2293                          getresidual=None, outlog=None, blfile=None):
2294        """\
2295        Return a scan which has been baselined (all rows) with sinusoidal functions.
2296        Parameters:
2297            insitu:        if False a new scantable is returned.
2298                           Otherwise, the scaling is done in-situ
2299                           The default is taken from .asaprc (False)
2300            mask:          an optional mask
2301            applyfft:      if True use some method, such as FFT, to find
2302                           strongest sinusoidal components in the wavenumber
2303                           domain to be used for baseline fitting.
2304                           default is True.
2305            fftmethod:     method to find the strong sinusoidal components.
2306                           now only 'fft' is available and it is the default.
2307            fftthresh:     the threshold to select wave numbers to be used for
2308                           fitting from the distribution of amplitudes in the
2309                           wavenumber domain.
2310                           both float and string values accepted.
2311                           given a float value, the unit is set to sigma.
2312                           for string values, allowed formats include:
2313                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2314                               'topx' (= the x strongest ones, e.g. 'top5').
2315                           default is 3.0 (unit: sigma).
2316            addwn:         the additional wave numbers to be used for fitting.
2317                           a list or integer value is accepted to specify
2318                           wave numbers one by one. a string value can also be used in case
2319                           you need to specify wave numbers in a certain range,
2320                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2321                                 '<a'  (= 0,1,...,a-2,a-1),
2322                                 '>=a' (= a, a+1, ... up to the maximum wave
2323                                        number corresponding to the Nyquist
2324                                        frequency for the case of FFT).
2325                           default is [].
2326            rejwn:         the wave numbers NOT to be used for fitting.
2327                           can be set just as addwn but has higher priority:
2328                           wave numbers which are specified both in addwn
2329                           and rejwn will NOT be used. default is [].
2330            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
2331            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2332            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2333                           plot the fit and the residual. Each
2334                           individual fit has to be approved, by typing 'y'
2335                           or 'n'
2336            getresidual:   if False, returns best-fit values instead of
2337                           residual. (default is True)
2338            outlog:        Output the coefficients of the best-fit
2339                           function to logger (default is False)
2340            blfile:        Name of a text file in which the best-fit
2341                           parameter values are to be written
2342                           (default is '': no file/logger output)
2343
2344        Example:
2345            # return a scan baselined by a combination of sinusoidal curves having
2346            # wave numbers in spectral window up to 10,
2347            # also with 3-sigma clipping, iteration up to 4 times
2348            bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
2349
2350        Note:
2351            The best-fit parameter values output in logger and/or blfile are now
2352            based on specunit of 'channel'.
2353        """
2354       
2355        try:
2356            varlist = vars()
2357       
2358            if insitu is None: insitu = rcParams['insitu']
2359            if insitu:
2360                workscan = self
2361            else:
2362                workscan = self.copy()
2363           
2364            if mask          is None: mask          = [True for i in xrange(workscan.nchan())]
2365            if applyfft      is None: applyfft      = True
2366            if fftmethod     is None: fftmethod     = 'fft'
2367            if fftthresh     is None: fftthresh     = 3.0
2368            if addwn         is None: addwn         = []
2369            if rejwn         is None: rejwn         = []
2370            if clipthresh    is None: clipthresh    = 3.0
2371            if clipniter     is None: clipniter     = 0
2372            if plot          is None: plot          = False
2373            if getresidual   is None: getresidual   = True
2374            if outlog        is None: outlog        = False
2375            if blfile        is None: blfile        = ''
2376
2377            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2378            workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, getresidual, outlog, blfile)
2379            workscan._add_history('sinusoid_baseline', varlist)
2380           
2381            if insitu:
2382                self._assign(workscan)
2383            else:
2384                return workscan
2385           
2386        except RuntimeError, e:
2387            raise_fitting_failure_exception(e)
2388
2389
2390    @asaplog_post_dec
2391    def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, fftmethod=None, fftthresh=None,
2392                               addwn=None, rejwn=None, clipthresh=None, clipniter=None, edge=None, threshold=None,
2393                               chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
2394        """\
2395        Return a scan which has been baselined (all rows) with sinusoidal functions.
2396        Spectral lines are detected first using linefinder and masked out
2397        to avoid them affecting the baseline solution.
2398
2399        Parameters:
2400            insitu:        if False a new scantable is returned.
2401                           Otherwise, the scaling is done in-situ
2402                           The default is taken from .asaprc (False)
2403            mask:          an optional mask retrieved from scantable
2404            applyfft:      if True use some method, such as FFT, to find
2405                           strongest sinusoidal components in the wavenumber
2406                           domain to be used for baseline fitting.
2407                           default is True.
2408            fftmethod:     method to find the strong sinusoidal components.
2409                           now only 'fft' is available and it is the default.
2410            fftthresh:     the threshold to select wave numbers to be used for
2411                           fitting from the distribution of amplitudes in the
2412                           wavenumber domain.
2413                           both float and string values accepted.
2414                           given a float value, the unit is set to sigma.
2415                           for string values, allowed formats include:
2416                               'xsigma' or 'x' (= x-sigma level. e.g., '3sigma'), or
2417                               'topx' (= the x strongest ones, e.g. 'top5').
2418                           default is 3.0 (unit: sigma).
2419            addwn:         the additional wave numbers to be used for fitting.
2420                           a list or integer value is accepted to specify
2421                           wave numbers one by one. a string value can also be used in case
2422                           you need to specify wave numbers in a certain range,
2423                           e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
2424                                 '<a'  (= 0,1,...,a-2,a-1),
2425                                 '>=a' (= a, a+1, ... up to the maximum wave
2426                                        number corresponding to the Nyquist
2427                                        frequency for the case of FFT).
2428                           default is [].
2429            rejwn:         the wave numbers NOT to be used for fitting.
2430                           can be set just as addwn but has higher priority:
2431                           wave numbers which are specified both in addwn
2432                           and rejwn will NOT be used. default is [].
2433            clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
2434            clipniter:     maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2435            edge:          an optional number of channels to drop at
2436                           the edge of spectrum. If only one value is
2437                           specified, the same number will be dropped
2438                           from both sides of the spectrum. Default
2439                           is to keep all channels. Nested tuples
2440                           represent individual edge selection for
2441                           different IFs (a number of spectral channels
2442                           can be different)
2443            threshold:     the threshold used by line finder. It is
2444                           better to keep it large as only strong lines
2445                           affect the baseline solution.
2446            chan_avg_limit:a maximum number of consecutive spectral
2447                           channels to average during the search of
2448                           weak and broad lines. The default is no
2449                           averaging (and no search for weak lines).
2450                           If such lines can affect the fitted baseline
2451                           (e.g. a high order polynomial is fitted),
2452                           increase this parameter (usually values up
2453                           to 8 are reasonable). Most users of this
2454                           method should find the default value sufficient.
2455            plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2456                           plot the fit and the residual. Each
2457                           individual fit has to be approved, by typing 'y'
2458                           or 'n'
2459            getresidual:   if False, returns best-fit values instead of
2460                           residual. (default is True)
2461            outlog:        Output the coefficients of the best-fit
2462                           function to logger (default is False)
2463            blfile:        Name of a text file in which the best-fit
2464                           parameter values are to be written
2465                           (default is "": no file/logger output)
2466
2467        Example:
2468            bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
2469       
2470        Note:
2471            The best-fit parameter values output in logger and/or blfile are now
2472            based on specunit of 'channel'.
2473        """
2474
2475        try:
2476            varlist = vars()
2477
2478            if insitu is None: insitu = rcParams['insitu']
2479            if insitu:
2480                workscan = self
2481            else:
2482                workscan = self.copy()
2483           
2484            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2485            if applyfft       is None: applyfft       = True
2486            if fftmethod      is None: fftmethod      = 'fft'
2487            if fftthresh      is None: fftthresh      = 3.0
2488            if addwn          is None: addwn          = []
2489            if rejwn          is None: rejwn          = []
2490            if clipthresh     is None: clipthresh     = 3.0
2491            if clipniter      is None: clipniter      = 0
2492            if edge           is None: edge           = (0,0)
2493            if threshold      is None: threshold      = 3
2494            if chan_avg_limit is None: chan_avg_limit = 1
2495            if plot           is None: plot           = False
2496            if getresidual    is None: getresidual    = True
2497            if outlog         is None: outlog         = False
2498            if blfile         is None: blfile         = ''
2499
2500            #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method.
2501            workscan._auto_sinusoid_baseline(mask, applyfft, fftmethod.lower(), str(fftthresh).lower(), workscan._parse_wn(addwn), workscan._parse_wn(rejwn), clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, outlog, blfile)
2502            workscan._add_history("auto_sinusoid_baseline", varlist)
2503           
2504            if insitu:
2505                self._assign(workscan)
2506            else:
2507                return workscan
2508           
2509        except RuntimeError, e:
2510            raise_fitting_failure_exception(e)
2511
2512    @asaplog_post_dec
2513    def cspline_baseline(self, insitu=None, mask=None, npiece=None,
2514                         clipthresh=None, clipniter=None, plot=None, getresidual=None, outlog=None, blfile=None):
2515        """\
2516        Return a scan which has been baselined (all rows) by cubic spline function (piecewise cubic polynomial).
2517        Parameters:
2518            insitu:     If False a new scantable is returned.
2519                        Otherwise, the scaling is done in-situ
2520                        The default is taken from .asaprc (False)
2521            mask:       An optional mask
2522            npiece:     Number of pieces. (default is 2)
2523            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2524            clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2525            plot:   *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2526                        plot the fit and the residual. Each
2527                        individual fit has to be approved, by typing 'y'
2528                        or 'n'
2529            getresidual:if False, returns best-fit values instead of
2530                        residual. (default is True)
2531            outlog:     Output the coefficients of the best-fit
2532                        function to logger (default is False)
2533            blfile:     Name of a text file in which the best-fit
2534                        parameter values are to be written
2535                        (default is "": no file/logger output)
2536
2537        Example:
2538            # return a scan baselined by a cubic spline consisting of 2 pieces (i.e., 1 internal knot),
2539            # also with 3-sigma clipping, iteration up to 4 times
2540            bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
2541       
2542        Note:
2543            The best-fit parameter values output in logger and/or blfile are now
2544            based on specunit of 'channel'.
2545        """
2546       
2547        try:
2548            varlist = vars()
2549           
2550            if insitu is None: insitu = rcParams['insitu']
2551            if insitu:
2552                workscan = self
2553            else:
2554                workscan = self.copy()
2555
2556            if mask        is None: mask        = [True for i in xrange(workscan.nchan())]
2557            if npiece      is None: npiece      = 2
2558            if clipthresh  is None: clipthresh  = 3.0
2559            if clipniter   is None: clipniter   = 0
2560            if plot        is None: plot        = False
2561            if getresidual is None: getresidual = True
2562            if outlog      is None: outlog      = False
2563            if blfile      is None: blfile      = ''
2564
2565            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2566            workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, getresidual, outlog, blfile)
2567            workscan._add_history("cspline_baseline", varlist)
2568           
2569            if insitu:
2570                self._assign(workscan)
2571            else:
2572                return workscan
2573           
2574        except RuntimeError, e:
2575            raise_fitting_failure_exception(e)
2576
2577    @asaplog_post_dec
2578    def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, clipthresh=None,
2579                              clipniter=None, edge=None, threshold=None,
2580                              chan_avg_limit=None, getresidual=None, plot=None, outlog=None, blfile=None):
2581        """\
2582        Return a scan which has been baselined (all rows) by cubic spline
2583        function (piecewise cubic polynomial).
2584        Spectral lines are detected first using linefinder and masked out
2585        to avoid them affecting the baseline solution.
2586
2587        Parameters:
2588            insitu:     if False a new scantable is returned.
2589                        Otherwise, the scaling is done in-situ
2590                        The default is taken from .asaprc (False)
2591            mask:       an optional mask retrieved from scantable
2592            npiece:     Number of pieces. (default is 2)
2593            clipthresh: Clipping threshold. (default is 3.0, unit: sigma)
2594            clipniter:  maximum number of iterations of 'clipthresh'-sigma clipping (default is 0)
2595            edge:       an optional number of channels to drop at
2596                        the edge of spectrum. If only one value is
2597                        specified, the same number will be dropped
2598                        from both sides of the spectrum. Default
2599                        is to keep all channels. Nested tuples
2600                        represent individual edge selection for
2601                        different IFs (a number of spectral channels
2602                        can be different)
2603            threshold:  the threshold used by line finder. It is
2604                        better to keep it large as only strong lines
2605                        affect the baseline solution.
2606            chan_avg_limit:
2607                        a maximum number of consecutive spectral
2608                        channels to average during the search of
2609                        weak and broad lines. The default is no
2610                        averaging (and no search for weak lines).
2611                        If such lines can affect the fitted baseline
2612                        (e.g. a high order polynomial is fitted),
2613                        increase this parameter (usually values up
2614                        to 8 are reasonable). Most users of this
2615                        method should find the default value sufficient.
2616            plot:   *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
2617                        plot the fit and the residual. Each
2618                        individual fit has to be approved, by typing 'y'
2619                        or 'n'
2620            getresidual:if False, returns best-fit values instead of
2621                        residual. (default is True)
2622            outlog:     Output the coefficients of the best-fit
2623                        function to logger (default is False)
2624            blfile:     Name of a text file in which the best-fit
2625                        parameter values are to be written
2626                        (default is "": no file/logger output)
2627
2628        Example:
2629            bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
2630       
2631        Note:
2632            The best-fit parameter values output in logger and/or blfile are now
2633            based on specunit of 'channel'.
2634        """
2635
2636        try:
2637            varlist = vars()
2638
2639            if insitu is None: insitu = rcParams['insitu']
2640            if insitu:
2641                workscan = self
2642            else:
2643                workscan = self.copy()
2644           
2645            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2646            if npiece         is None: npiece         = 2
2647            if clipthresh     is None: clipthresh     = 3.0
2648            if clipniter      is None: clipniter      = 0
2649            if edge           is None: edge           = (0, 0)
2650            if threshold      is None: threshold      = 3
2651            if chan_avg_limit is None: chan_avg_limit = 1
2652            if plot           is None: plot           = False
2653            if getresidual    is None: getresidual    = True
2654            if outlog         is None: outlog         = False
2655            if blfile         is None: blfile         = ''
2656
2657            #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
2658            workscan._auto_cspline_baseline(mask, npiece, clipthresh, clipniter, normalise_edge_param(edge), threshold, chan_avg_limit, getresidual, outlog, blfile)
2659            workscan._add_history("auto_cspline_baseline", varlist)
2660           
2661            if insitu:
2662                self._assign(workscan)
2663            else:
2664                return workscan
2665           
2666        except RuntimeError, e:
2667            raise_fitting_failure_exception(e)
2668
2669    @asaplog_post_dec
2670    def poly_baseline(self, insitu=None, mask=None, order=None, plot=None, getresidual=None, outlog=None, blfile=None):
2671        """\
2672        Return a scan which has been baselined (all rows) by a polynomial.
2673        Parameters:
2674            insitu:     if False a new scantable is returned.
2675                        Otherwise, the scaling is done in-situ
2676                        The default is taken from .asaprc (False)
2677            mask:       an optional mask
2678            order:      the order of the polynomial (default is 0)
2679            plot:       plot the fit and the residual. In this mode
2680                        each individual fit has to be approved by
2681                        typing 'y' or 'n'
2682            getresidual:if False, returns best-fit values instead of
2683                        residual. (default is True)
2684            outlog:     Output the coefficients of the best-fit
2685                        function to logger (default is False)
2686            blfile:     Name of a text file in which the best-fit
2687                        parameter values are to be written
2688                        (default is "": no file/logger output)
2689
2690        Example:
2691            # return a scan baselined by a third order polynomial,
2692            # not using a mask
2693            bscan = scan.poly_baseline(order=3)
2694        """
2695       
2696        try:
2697            varlist = vars()
2698       
2699            if insitu is None: insitu = rcParams["insitu"]
2700            if insitu:
2701                workscan = self
2702            else:
2703                workscan = self.copy()
2704
2705            if mask        is None: mask        = [True for i in xrange(workscan.nchan())]
2706            if order       is None: order       = 0
2707            if plot        is None: plot        = False
2708            if getresidual is None: getresidual = True
2709            if outlog      is None: outlog      = False
2710            if blfile      is None: blfile      = ""
2711
2712            if plot:
2713                outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2714                if outblfile: blf = open(blfile, "a")
2715               
2716                f = fitter()
2717                f.set_function(lpoly=order)
2718               
2719                rows = xrange(workscan.nrow())
2720                #if len(rows) > 0: workscan._init_blinfo()
2721               
2722                for r in rows:
2723                    f.x = workscan._getabcissa(r)
2724                    f.y = workscan._getspectrum(r)
2725                    f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
2726                    f.data = None
2727                    f.fit()
2728                   
2729                    f.plot(residual=True)
2730                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2731                    if accept_fit.upper() == "N":
2732                        #workscan._append_blinfo(None, None, None)
2733                        continue
2734                   
2735                    blpars = f.get_parameters()
2736                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2737                    #workscan._append_blinfo(blpars, masklist, f.mask)
2738                    workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
2739                   
2740                    if outblfile:
2741                        rms = workscan.get_rms(f.mask, r)
2742                        dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2743                        blf.write(dataout)
2744
2745                f._p.unmap()
2746                f._p = None
2747
2748                if outblfile: blf.close()
2749            else:
2750                workscan._poly_baseline(mask, order, getresidual, outlog, blfile)
2751           
2752            workscan._add_history("poly_baseline", varlist)
2753           
2754            if insitu:
2755                self._assign(workscan)
2756            else:
2757                return workscan
2758           
2759        except RuntimeError, e:
2760            raise_fitting_failure_exception(e)
2761
2762    @asaplog_post_dec
2763    def auto_poly_baseline(self, insitu=None, mask=None, order=None, edge=None, threshold=None,
2764                           chan_avg_limit=None, plot=None, getresidual=None, outlog=None, blfile=None):
2765        """\
2766        Return a scan which has been baselined (all rows) by a polynomial.
2767        Spectral lines are detected first using linefinder and masked out
2768        to avoid them affecting the baseline solution.
2769
2770        Parameters:
2771            insitu:     if False a new scantable is returned.
2772                        Otherwise, the scaling is done in-situ
2773                        The default is taken from .asaprc (False)
2774            mask:       an optional mask retrieved from scantable
2775            order:      the order of the polynomial (default is 0)
2776            edge:       an optional number of channels to drop at
2777                        the edge of the spectrum. If only one value is
2778                        specified, the same number will be dropped
2779                        from both sides of the spectrum. Default
2780                        is to keep all channels. Nested tuples
2781                        represent individual edge selection for
2782                        different IFs (the number of spectral channels
2783                        can be different)
2784            threshold:  the threshold used by line finder. It is
2785                        better to keep it large as only strong lines
2786                        affect the baseline solution.
2787            chan_avg_limit:
2788                        a maximum number of consecutive spectral
2789                        channels to average during the search of
2790                        weak and broad lines. The default is no
2791                        averaging (and no search for weak lines).
2792                        If such lines can affect the fitted baseline
2793                        (e.g. a high order polynomial is fitted),
2794                        increase this parameter (usually values up
2795                        to 8 are reasonable). Most users of this
2796                        method should find the default value sufficient.
2797            plot:       plot the fit and the residual. In this mode
2798                        each individual fit has to be approved by
2799                        typing 'y' or 'n'
2800            getresidual:if False, returns best-fit values instead of
2801                        residual. (default is True)
2802            outlog:     Output the coefficients of the best-fit
2803                        function to logger (default is False)
2804            blfile:     Name of a text file in which the best-fit
2805                        parameter values are to be written
2806                        (default is "": no file/logger output)
2807
2808        Example:
2809            bscan = scan.auto_poly_baseline(order=7, insitu=False)
2810        """
2811
2812        try:
2813            varlist = vars()
2814
2815            if insitu is None: insitu = rcParams['insitu']
2816            if insitu:
2817                workscan = self
2818            else:
2819                workscan = self.copy()
2820
2821            if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
2822            if order          is None: order          = 0
2823            if edge           is None: edge           = (0, 0)
2824            if threshold      is None: threshold      = 3
2825            if chan_avg_limit is None: chan_avg_limit = 1
2826            if plot           is None: plot           = False
2827            if getresidual    is None: getresidual    = True
2828            if outlog         is None: outlog         = False
2829            if blfile         is None: blfile         = ''
2830
2831            edge = normalise_edge_param(edge)
2832
2833            if plot:
2834                outblfile = (blfile != "") and os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
2835                if outblfile: blf = open(blfile, "a")
2836               
2837                from asap.asaplinefind import linefinder
2838                fl = linefinder()
2839                fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
2840                fl.set_scan(workscan)
2841               
2842                f = fitter()
2843                f.set_function(lpoly=order)
2844
2845                rows = xrange(workscan.nrow())
2846                #if len(rows) > 0: workscan._init_blinfo()
2847               
2848                for r in rows:
2849                    idx = 2*workscan.getif(r)
2850                    fl.find_lines(r, mask_and(mask, workscan._getmask(r)), edge[idx:idx+2])  # (CAS-1434)
2851
2852                    f.x = workscan._getabcissa(r)
2853                    f.y = workscan._getspectrum(r)
2854                    f.mask = fl.get_mask()
2855                    f.data = None
2856                    f.fit()
2857
2858                    f.plot(residual=True)
2859                    accept_fit = raw_input("Accept fit ( [y]/n ): ")
2860                    if accept_fit.upper() == "N":
2861                        #workscan._append_blinfo(None, None, None)
2862                        continue
2863
2864                    blpars = f.get_parameters()
2865                    masklist = workscan.get_masklist(f.mask, row=r, silent=True)
2866                    #workscan._append_blinfo(blpars, masklist, f.mask)
2867                    workscan._setspectrum((f.fitter.getresidual() if getresidual else f.fitter.getfit()), r)
2868
2869                    if outblfile:
2870                        rms = workscan.get_rms(f.mask, r)
2871                        dataout = workscan.format_blparams_row(blpars["params"], blpars["fixed"], rms, str(masklist), r, True)
2872                        blf.write(dataout)
2873                   
2874                f._p.unmap()
2875                f._p = None
2876
2877                if outblfile: blf.close()
2878            else:
2879                workscan._auto_poly_baseline(mask, order, edge, threshold, chan_avg_limit, getresidual, outlog, blfile)
2880
2881            workscan._add_history("auto_poly_baseline", varlist)
2882           
2883            if insitu:
2884                self._assign(workscan)
2885            else:
2886                return workscan
2887           
2888        except RuntimeError, e:
2889            raise_fitting_failure_exception(e)
2890
2891    def _init_blinfo(self):
2892        """\
2893        Initialise the following three auxiliary members:
2894           blpars : parameters of the best-fit baseline,
2895           masklists : mask data (edge positions of masked channels) and
2896           actualmask : mask data (in boolean list),
2897        to keep for use later (including output to logger/text files).
2898        Used by poly_baseline() and auto_poly_baseline() in case of
2899        'plot=True'.
2900        """
2901        self.blpars = []
2902        self.masklists = []
2903        self.actualmask = []
2904        return
2905
2906    def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
2907        """\
2908        Append baseline-fitting related info to blpars, masklist and
2909        actualmask.
2910        """
2911        self.blpars.append(data_blpars)
2912        self.masklists.append(data_masklists)
2913        self.actualmask.append(data_actualmask)
2914        return
2915       
2916    @asaplog_post_dec
2917    def rotate_linpolphase(self, angle):
2918        """\
2919        Rotate the phase of the complex polarization O=Q+iU correlation.
2920        This is always done in situ in the raw data.  So if you call this
2921        function more than once then each call rotates the phase further.
2922
2923        Parameters:
2924
2925            angle:   The angle (degrees) to rotate (add) by.
2926
2927        Example::
2928
2929            scan.rotate_linpolphase(2.3)
2930
2931        """
2932        varlist = vars()
2933        self._math._rotate_linpolphase(self, angle)
2934        self._add_history("rotate_linpolphase", varlist)
2935        return
2936
2937    @asaplog_post_dec
2938    def rotate_xyphase(self, angle):
2939        """\
2940        Rotate the phase of the XY correlation.  This is always done in situ
2941        in the data.  So if you call this function more than once
2942        then each call rotates the phase further.
2943
2944        Parameters:
2945
2946            angle:   The angle (degrees) to rotate (add) by.
2947
2948        Example::
2949
2950            scan.rotate_xyphase(2.3)
2951
2952        """
2953        varlist = vars()
2954        self._math._rotate_xyphase(self, angle)
2955        self._add_history("rotate_xyphase", varlist)
2956        return
2957
2958    @asaplog_post_dec
2959    def swap_linears(self):
2960        """\
2961        Swap the linear polarisations XX and YY, or more generally the first
2962        two polarisations, as this also works for circulars.
2963        """
2964        varlist = vars()
2965        self._math._swap_linears(self)
2966        self._add_history("swap_linears", varlist)
2967        return
2968
2969    @asaplog_post_dec
2970    def invert_phase(self):
2971        """\
2972        Invert the phase of the complex polarisation
2973        """
2974        varlist = vars()
2975        self._math._invert_phase(self)
2976        self._add_history("invert_phase", varlist)
2977        return
2978
2979    @asaplog_post_dec
2980    def add(self, offset, insitu=None):
2981        """\
2982        Return a scan where all spectra have the offset added
2983
2984        Parameters:
2985
2986            offset:      the offset
2987
2988            insitu:      if False a new scantable is returned.
2989                         Otherwise, the scaling is done in-situ
2990                         The default is taken from .asaprc (False)
2991
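        Example (illustrative)::

            # return a copy with a constant offset of 0.05 added to all spectra
            sadd = scan.add(0.05, insitu=False)
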
2992        """
2993        if insitu is None: insitu = rcParams['insitu']
2994        self._math._setinsitu(insitu)
2995        varlist = vars()
2996        s = scantable(self._math._unaryop(self, offset, "ADD", False))
2997        s._add_history("add", varlist)
2998        if insitu:
2999            self._assign(s)
3000        else:
3001            return s
3002
3003    @asaplog_post_dec
3004    def scale(self, factor, tsys=True, insitu=None):
3005        """\
3006
3007        Return a scan where all spectra are scaled by the given 'factor'
3008
3009        Parameters:
3010
3011            factor:      the scaling factor (float or 1D float list)
3012
3013            insitu:      if False a new scantable is returned.
3014                         Otherwise, the scaling is done in-situ
3015                         The default is taken from .asaprc (False)
3016
3017            tsys:        if True (default) then apply the operation to Tsys
3018                         as well as the data
3019
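        Example (illustrative)::

            # scale all spectra and Tsys by 2.0, returning a new scantable
            sscaled = scan.scale(2.0, tsys=True, insitu=False)
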
3020        """
3021        if insitu is None: insitu = rcParams['insitu']
3022        self._math._setinsitu(insitu)
3023        varlist = vars()
3024        s = None
3025        import numpy
3026        if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
3027            if isinstance(factor[0], list) or isinstance(factor[0], numpy.ndarray):
3028                from asapmath import _array2dOp
3029                s = _array2dOp( self.copy(), factor, "MUL", tsys )
3030            else:
3031                s = scantable( self._math._arrayop( self.copy(), factor, "MUL", tsys ) )
3032        else:
3033            s = scantable(self._math._unaryop(self.copy(), factor, "MUL", tsys))
3034        s._add_history("scale", varlist)
3035        if insitu:
3036            self._assign(s)
3037        else:
3038            return s
3039
3040    def set_sourcetype(self, match, matchtype="pattern",
3041                       sourcetype="reference"):
3042        """\
3043        Set the type of the source to be a source or reference scan
3044        using the provided pattern.
3045
3046        Parameters:
3047
3048            match:          a Unix style pattern, regular expression or selector
3049
3050            matchtype:      'pattern' (default) UNIX style pattern or
3051                            'regex' regular expression
3052
3053            sourcetype:     the type of the source to use (source/reference)
3054
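        Example (illustrative; '*_R' follows the Mopra/Parkes convention of
        marking 'off' scans with a trailing '_R')::

            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')
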
3055        """
3056        varlist = vars()
3057        basesel = self.get_selection()
3058        stype = -1
3059        if sourcetype.lower().startswith("r"):
3060            stype = 1
3061        elif sourcetype.lower().startswith("s"):
3062            stype = 0
3063        else:
3064            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
3065        if matchtype.lower().startswith("p"):
3066            matchtype = "pattern"
3067        elif matchtype.lower().startswith("r"):
3068            matchtype = "regex"
3069        else:
3070            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
3071        sel = selector()
3072        if isinstance(match, selector):
3073            sel = match
3074        else:
3075            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
3076        self.set_selection(basesel+sel)
3077        self._setsourcetype(stype)
3078        self.set_selection(basesel)
3079        self._add_history("set_sourcetype", varlist)
3080
3081    @asaplog_post_dec
3082    @preserve_selection
3083    def auto_quotient(self, preserve=True, mode='paired', verify=False):
3084        """\
3085        This function allows you to build quotients automatically.
3086        It assumes the observation has the same number of
3087        "ons" and "offs".
3088
3089        Parameters:
3090
3091            preserve:       you can preserve (default) the continuum or
3092                            remove it.  The equations used are
3093
3094                            preserve: Output = Toff * (on/off) - Toff
3095
3096                            remove:   Output = Toff * (on/off) - Ton
3097
3098            mode:           the on/off detection mode
3099                            'paired' (default)
3100                            identifies 'off' scans by the
3101                            trailing '_R' (Mopra/Parkes) or
3102                            '_e'/'_w' (Tid) and matches
3103                            on/off pairs from the observing pattern
3104                            'time'
3105                            finds the closest off in time
3106
3107        .. todo:: verify argument is not implemented
3108
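        Example (illustrative)::

            # form quotients from matched on/off pairs, keeping the continuum
            q = scan.auto_quotient(preserve=True, mode='paired')
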
3109        """
3110        varlist = vars()
3111        modes = ["time", "paired"]
3112        if not mode in modes:
3113            msg = "please provide a valid mode. Valid modes are %s" % (modes)
3114            raise ValueError(msg)
3115        s = None
3116        if mode.lower() == "paired":
3117            sel = self.get_selection()
3118            sel.set_query("SRCTYPE==psoff")
3119            self.set_selection(sel)
3120            offs = self.copy()
3121            sel.set_query("SRCTYPE==pson")
3122            self.set_selection(sel)
3123            ons = self.copy()
3124            s = scantable(self._math._quotient(ons, offs, preserve))
3125        elif mode.lower() == "time":
3126            s = scantable(self._math._auto_quotient(self, mode, preserve))
3127        s._add_history("auto_quotient", varlist)
3128        return s
3129
3130    @asaplog_post_dec
3131    def mx_quotient(self, mask = None, weight='median', preserve=True):
3132        """\
3133        Form a quotient using "off" beams when observing in "MX" mode.
3134
3135        Parameters:
3136
3137            mask:           an optional mask to be used when weight == 'stddev'
3138
3139            weight:         How to average the off beams.  Default is 'median'.
3140
3141            preserve:       you can preserve (default) the continuum or
3142                            remove it.  The equations used are:
3143
3144                                preserve: Output = Toff * (on/off) - Toff
3145
3146                                remove:   Output = Toff * (on/off) - Ton
3147
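        Example (illustrative)::

            # median-average the off beams and form the quotient
            q = scan.mx_quotient(weight='median', preserve=True)
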
3148        """
3149        mask = mask or ()
3150        varlist = vars()
3151        on = scantable(self._math._mx_extract(self, 'on'))
3152        preoff = scantable(self._math._mx_extract(self, 'off'))
3153        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
3154        from asapmath  import quotient
3155        q = quotient(on, off, preserve)
3156        q._add_history("mx_quotient", varlist)
3157        return q
3158
3159    @asaplog_post_dec
3160    def freq_switch(self, insitu=None):
3161        """\
3162        Apply frequency switching to the data.
3163
3164        Parameters:
3165
3166            insitu:      if False a new scantable is returned.
3167                         Otherwise, the switching is done in-situ
3168                         The default is taken from .asaprc (False)
3169
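        Example (illustrative)::

            # return a frequency-switched copy of the data
            fs = scan.freq_switch(insitu=False)
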
3170        """
3171        if insitu is None: insitu = rcParams['insitu']
3172        self._math._setinsitu(insitu)
3173        varlist = vars()
3174        s = scantable(self._math._freqswitch(self))
3175        s._add_history("freq_switch", varlist)
3176        if insitu:
3177            self._assign(s)
3178        else:
3179            return s
3180
3181    @asaplog_post_dec
3182    def recalc_azel(self):
3183        """Recalculate the azimuth and elevation for each position."""
3184        varlist = vars()
3185        self._recalcazel()
3186        self._add_history("recalc_azel", varlist)
3187        return
3188
3189    @asaplog_post_dec
3190    def __add__(self, other):
3191        varlist = vars()
3192        s = None
3193        if isinstance(other, scantable):
3194            s = scantable(self._math._binaryop(self, other, "ADD"))
3195        elif isinstance(other, float):
3196            s = scantable(self._math._unaryop(self, other, "ADD", False))
3197        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3198            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3199                from asapmath import _array2dOp
3200                s = _array2dOp( self.copy(), other, "ADD", False )
3201            else:
3202                s = scantable( self._math._arrayop( self.copy(), other, "ADD", False ) )
3203        else:
3204            raise TypeError("Other input is not a scantable or float value")
3205        s._add_history("operator +", varlist)
3206        return s
3207
3208    @asaplog_post_dec
3209    def __sub__(self, other):
3210        """
3211        implicit on all axes and on Tsys
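
        Example (illustrative)::

            # subtract a constant from all spectra, or another scantable
            diff = scan - 5.0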
3212        """
3213        varlist = vars()
3214        s = None
3215        if isinstance(other, scantable):
3216            s = scantable(self._math._binaryop(self, other, "SUB"))
3217        elif isinstance(other, float):
3218            s = scantable(self._math._unaryop(self, other, "SUB", False))
3219        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3220            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3221                from asapmath import _array2dOp
3222                s = _array2dOp( self.copy(), other, "SUB", False )
3223            else:
3224                s = scantable( self._math._arrayop( self.copy(), other, "SUB", False ) )
3225        else:
3226            raise TypeError("Other input is not a scantable or float value")
3227        s._add_history("operator -", varlist)
3228        return s
3229
3230    @asaplog_post_dec
3231    def __mul__(self, other):
3232        """
3233        implicit on all axes and on Tsys
3234        """
3235        varlist = vars()
3236        s = None
3237        if isinstance(other, scantable):
3238            s = scantable(self._math._binaryop(self, other, "MUL"))
3239        elif isinstance(other, float):
3240            s = scantable(self._math._unaryop(self, other, "MUL", False))
3241        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3242            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3243                from asapmath import _array2dOp
3244                s = _array2dOp( self.copy(), other, "MUL", False )
3245            else:
3246                s = scantable( self._math._arrayop( self.copy(), other, "MUL", False ) )
3247        else:
3248            raise TypeError("Other input is not a scantable or float value")
3249        s._add_history("operator *", varlist)
3250        return s
3251
3252
3253    @asaplog_post_dec
3254    def __div__(self, other):
3255        """
3256        implicit on all axes and on Tsys
3257        """
3258        varlist = vars()
3259        s = None
3260        if isinstance(other, scantable):
3261            s = scantable(self._math._binaryop(self, other, "DIV"))
3262        elif isinstance(other, float):
3263            if other == 0.0:
3264                raise ZeroDivisionError("Dividing by zero is not recommended")
3265            s = scantable(self._math._unaryop(self, other, "DIV", False))
3266        elif isinstance(other, list) or isinstance(other, numpy.ndarray):
3267            if isinstance(other[0], list) or isinstance(other[0], numpy.ndarray):
3268                from asapmath import _array2dOp
3269                s = _array2dOp( self.copy(), other, "DIV", False )
3270            else:
3271                s = scantable( self._math._arrayop( self.copy(), other, "DIV", False ) )
3272        else:
3273            raise TypeError("Other input is not a scantable or float value")
3274        s._add_history("operator /", varlist)
3275        return s
3276
3277    @asaplog_post_dec
3278    def get_fit(self, row=0):
3279        """\
3280        Print or return the stored fits for a row in the scantable
3281
3282        Parameters:
3283
3284            row:    the row which the fit has been applied to.
3285
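        Example (illustrative)::

            # print and retrieve the stored fit for the first row as a dict
            fitinfo = scan.get_fit(row=0)
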
3286        """
3287        if row >= self.nrow():
3288            return
3289        from asap.asapfit import asapfit
3290        fit = asapfit(self._getfit(row))
3291        asaplog.push( '%s' %(fit) )
3292        return fit.as_dict()
3293
3294    def flag_nans(self):
3295        """\
3296        Utility function to flag NaN values in the scantable.
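
        Example (illustrative)::

            # flag any NaN channels, row by row, operating in situ
            scan.flag_nans()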
3297        """
3298        import numpy
3299        basesel = self.get_selection()
3300        for i in range(self.nrow()):
3301            sel = self.get_row_selector(i)
3302            self.set_selection(basesel+sel)
3303            nans = numpy.isnan(self._getspectrum(0))
3304            if numpy.any(nans):
3305                bnans = [ bool(v) for v in nans]
3306                self.flag(bnans)
3307        self.set_selection(basesel)
3308
3309    def get_row_selector(self, rowno):
3310        #return selector(beams=self.getbeam(rowno),
3311        #                ifs=self.getif(rowno),
3312        #                pols=self.getpol(rowno),
3313        #                scans=self.getscan(rowno),
3314        #                cycles=self.getcycle(rowno))
3315        return selector(rows=[rowno])
3316
3317    def _add_history(self, funcname, parameters):
3318        if not rcParams['scantable.history']:
3319            return
3320        # create date
3321        sep = "##"
3322        from datetime import datetime
3323        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
3324        hist = dstr+sep
3325        hist += funcname+sep#cdate+sep
3326        if parameters.has_key('self'): del parameters['self']
3327        for k, v in parameters.iteritems():
3328            if type(v) is dict:
3329                for k2, v2 in v.iteritems():
3330                    hist += k2
3331                    hist += "="
3332                    if isinstance(v2, scantable):
3333                        hist += 'scantable'
3334                    elif k2 == 'mask':
3335                        if isinstance(v2, list) or isinstance(v2, tuple):
3336                            hist += str(self._zip_mask(v2))
3337                        else:
3338                            hist += str(v2)
3339                    else:
3340                        hist += str(v2)
3341            else:
3342                hist += k
3343                hist += "="
3344                if isinstance(v, scantable):
3345                    hist += 'scantable'
3346                elif k == 'mask':
3347                    if isinstance(v, list) or isinstance(v, tuple):
3348                        hist += str(self._zip_mask(v))
3349                    else:
3350                        hist += str(v)
3351                else:
3352                    hist += str(v)
3353            hist += sep
3354        hist = hist[:-2] # remove trailing '##'
3355        self._addhistory(hist)
3356
3357
3358    def _zip_mask(self, mask):
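        # Collapse a boolean/0-1 channel mask into a list of [start, end)
        # segments covering the unmasked (True) runs, e.g.
        # [0, 0, 1, 1, 1, 0, 1] -> [[2, 5], [6, 7]].
        # Used by _add_history() to log masks compactly.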
3359        mask = list(mask)
3360        i = 0
3361        segments = []
3362        while mask[i:].count(1):
3363            i += mask[i:].index(1)
3364            if mask[i:].count(0):
3365                j = i + mask[i:].index(0)
3366            else:
3367                j = len(mask)
3368            segments.append([i, j])
3369            i = j
3370        return segments
3371
3372    def _get_ordinate_label(self):
3373        fu = "("+self.get_fluxunit()+")"
3374        import re
3375        lbl = "Intensity"
3376        if re.match(".K.", fu):
3377            lbl = "Brightness Temperature "+ fu
3378        elif re.match(".Jy.", fu):
3379            lbl = "Flux density "+ fu
3380        return lbl
3381
3382    def _check_ifs(self):
3383        #nchans = [self.nchan(i) for i in range(self.nif(-1))]
3384        nchans = [self.nchan(i) for i in self.getifnos()]
3385        nchans = filter(lambda t: t > 0, nchans)
3386        return (sum(nchans)/len(nchans) == nchans[0])
3387
3388    @asaplog_post_dec
3389    #def _fill(self, names, unit, average, getpt, antenna):
3390    def _fill(self, names, unit, average, opts={}):
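        # Fill this scantable from one or more input files/directories:
        # pick the MS or ASAP filler per input, optionally average the
        # integrations of each, and merge successive inputs into one table.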
3391        first = True
3392        fullnames = []
3393        for name in names:
3394            name = os.path.expandvars(name)
3395            name = os.path.expanduser(name)
3396            if not os.path.exists(name):
3397                msg = "File '%s' does not exist" % (name)
3398                raise IOError(msg)
3399            fullnames.append(name)
3400        if average:
3401            asaplog.push('Auto averaging integrations')
3402        stype = int(rcParams['scantable.storage'].lower() == 'disk')
3403        for name in fullnames:
3404            tbl = Scantable(stype)
3405            if is_ms( name ):
3406                r = msfiller( tbl )
3407            else:
3408                r = filler( tbl )
3409                rx = rcParams['scantable.reference']
3410                r.setreferenceexpr(rx)
3411            #r = filler(tbl)
3412            #rx = rcParams['scantable.reference']
3413            #r.setreferenceexpr(rx)
3414            msg = "Importing %s..." % (name)
3415            asaplog.push(msg, False)
3416            #opts = {'ms': {'antenna' : antenna, 'getpt': getpt} }
3417            r.open(name, opts)# antenna, -1, -1, getpt)
3418            r.fill()
3419            if average:
3420                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
3421            if not first:
3422                tbl = self._math._merge([self, tbl])
3423            Scantable.__init__(self, tbl)
3424            r.close()
3425            del r, tbl
3426            first = False
3427            #flush log
3428        asaplog.post()
3429        if unit is not None:
3430            self.set_fluxunit(unit)
3431        if not is_casapy():
3432            self.set_freqframe(rcParams['scantable.freqframe'])
3433
3434
3435    def __getitem__(self, key):
3436        if key < 0:
3437            key += self.nrow()
3438        if key >= self.nrow():
3439            raise IndexError("Row index out of range.")
3440        return self._getspectrum(key)
3441
3442    def __setitem__(self, key, value):
3443        if key < 0:
3444            key += self.nrow()
3445        if key >= self.nrow():
3446            raise IndexError("Row index out of range.")
3447        if not hasattr(value, "__len__") or \
3448                len(value) > self.nchan(self.getif(key)):
3449            raise ValueError("Spectrum length doesn't match.")
3450        return self._setspectrum(value, key)
3451
3452    def __len__(self):
3453        return self.nrow()
3454
3455    def __iter__(self):
3456        for i in range(len(self)):
3457            yield self[i]