source: branches/alma/python/scantable.py @ 1603

Last change on this file since 1603 was 1603, checked in by TakTsutsumi, 15 years ago

New Development: No, merge with asap2.3.1

JIRA Issue: Yes CAS-1450

Ready to Release: Yes/No?

Interface Changes: Yes/No?

What Interface Changed: Please list interface changes

Test Programs: List test programs

Put in Release Notes: Yes

Module(s): single dish

Description: Upgrade of alma branch based on ASAP2.2.0 (rev.1562) to ASAP2.3.1 (rev.1561)


  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 78.4 KB
Line 
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None, getpt=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:        brightness unit; must be consistent with K or Jy.
29                         Overrides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32            getpt:       for MeasurementSet input data only:
33                         If True, all pointing data are filled.
34                         The default is False, which makes loading
35                         the MS data faster in some cases.
36        """
37        if average is None:
38            average = rcParams['scantable.autoaverage']
39        if getpt is None:
40            getpt = False
41        varlist = vars()
42        from asap._asap import stmath
43        self._math = stmath()
44        if isinstance(filename, Scantable):
45            Scantable.__init__(self, filename)
46        else:
47            if isinstance(filename, str):# or \
48#                (isinstance(filename, list) or isinstance(filename, tuple)) \
49#                  and isinstance(filename[-1], str):
50                import os.path
51                filename = os.path.expandvars(filename)
52                filename = os.path.expanduser(filename)
53                if not os.path.exists(filename):
54                    s = "File '%s' not found." % (filename)
55                    if rcParams['verbose']:
56                        asaplog.push(s)
57                        print asaplog.pop().strip()
58                        return
59                    raise IOError(s)
60                if os.path.isdir(filename) \
61                    and not os.path.exists(filename+'/table.f1'):
62                    # crude check if asap table
63                    if os.path.exists(filename+'/table.info'):
64                        ondisk = rcParams['scantable.storage'] == 'disk'
65                        Scantable.__init__(self, filename, ondisk)
66                        if unit is not None:
67                            self.set_fluxunit(unit)
68                        # do not reset to the default freqframe
69                        #self.set_freqframe(rcParams['scantable.freqframe'])
70                    else:
71                        msg = "The given file '%s' is not a valid " \
72                              "asap table." % (filename)
73                        if rcParams['verbose']:
74                            print msg
75                            return
76                        else:
77                            raise IOError(msg)
78                else:
79                    self._fill([filename], unit, average, getpt)
80            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
81                  and isinstance(filename[-1], str):
82                self._fill(filename, unit, average, getpt)
83        self._add_history("scantable", varlist)
84        print_log()
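    # Illustrative constructor calls; a sketch only, the file names below are
    # hypothetical and any of the rpfits/sdfits/ms readers may apply:
    #
    #     s = scantable('orion_sio.rpf', average=True)              # read and average integrations
    #     s = scantable('uid___X1.ms', average=False, getpt=True)   # MS input, keep pointing data
    #     s2 = scantable(s)                                         # reference to an existing scantable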
85
86    def save(self, name=None, format=None, overwrite=False):
87        """
88        Store the scantable on disk. This can be an asap (aips++) Table,
89        SDFITS or MS2 format.
90        Parameters:
91            name:        the name of the output file. For format "ASCII"
92                         this is the root file name (data in 'name'.txt
93                         and header in 'name'_header.txt)
94            format:      an optional file format. Default is ASAP.
95                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
96                                       'SDFITS' (save as SDFITS file)
97                                       'ASCII' (saves as ascii text file)
98                                       'MS2' (saves as an aips++
99                                              MeasurementSet V2)
100                                       'FITS' (save as image FITS - not
101                                               readable by class)
102                                       'CLASS' (save as FITS readable by CLASS)
103            overwrite:   whether to overwrite the file if it exists.
104                         The default (False) is to return with a warning
105                         without writing the output. USE WITH CARE.
106        Example:
107            scan.save('myscan.asap')
108            scan.save('myscan.sdfits', 'SDFITS')
109        """
110        from os import path
111        if format is None: format = rcParams['scantable.save']
112        suffix = '.'+format.lower()
113        if name is None or name == "":
114            name = 'scantable'+suffix
115            msg = "No filename given. Using default name %s..." % name
116            asaplog.push(msg)
117        name = path.expandvars(name)
118        if path.isfile(name) or path.isdir(name):
119            if not overwrite:
120                msg = "File %s exists." % name
121                if rcParams['verbose']:
122                    print msg
123                    return
124                else:
125                    raise IOError(msg)
126        format2 = format.upper()
127        if format2 == 'ASAP':
128            self._save(name)
129        else:
130            from asap._asap import stwriter as stw
131            writer = stw(format2)
132            writer.write(self, name)
133        print_log()
134        return
135
136    def copy(self):
137        """
138        Return a copy of this scantable.
139        Note:
140            This makes a full (deep) copy. scan2 = scan1 makes a reference.
141        Parameters:
142            none
143        Example:
144            copiedscan = scan.copy()
145        """
146        sd = scantable(Scantable._copy(self))
147        return sd
148
149    def drop_scan(self, scanid=None):
150        """
151        Return a new scantable where the specified scan number(s) have
152        been dropped.
153        Parameters:
154            scanid:    a (list of) scan number(s)
155        """
156        from asap import _is_sequence_or_number as _is_valid
157        from asap import _to_list
158        from asap import unique
159        if not _is_valid(scanid):
160            if rcParams['verbose']:
161                print "Please specify a scanno to drop from the scantable"
162                return
163            else:
164                raise RuntimeError("No scan given")
165        try:
166            scanid = _to_list(scanid)
167            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
168            for sid in scanid: allscans.remove(sid)
169            if len(allscans) == 0:
170                raise ValueError("Can't remove all scans")
171        except ValueError:
172            if rcParams['verbose']:
173                print "Couldn't find any match."
174                return
175            else: raise
176        try:
177            bsel = self.get_selection()
178            sel = selector()
179            sel.set_scans(allscans)
180            self.set_selection(bsel+sel)
181            scopy = self._copy()
182            self.set_selection(bsel)
183            return scantable(scopy)
184        except RuntimeError:
185            if rcParams['verbose']:
186                print "Couldn't find any match."
187            else:
188                raise
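    # Illustrative use of drop_scan(); assumes 'scan' is an existing scantable
    # containing scan numbers 2 and 3:
    #
    #     trimmed = scan.drop_scan([2, 3])   # new scantable without SCANNO 2 and 3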
189
190
191    def get_scan(self, scanid=None):
192        """
193        Return a specific scan (by scanno) or collection of scans (by
194        source name) in a new scantable.
195        Note:
196            See scantable.drop_scan() for the inverse operation.
197        Parameters:
198            scanid:    a (list of) scanno or a source name, unix-style
199                       patterns are accepted for source name matching, e.g.
200                       '*_R' gets all 'ref' scans
201        Example:
202            # get all scans containing the source '323p459'
203            newscan = scan.get_scan('323p459')
204            # get all 'off' scans
205            refscans = scan.get_scan('*_R')
206            # get a subset of scans by scanno (as listed in scan.summary())
207            newscan = scan.get_scan([0, 2, 7, 10])
208        """
209        if scanid is None:
210            if rcParams['verbose']:
211                print "Please specify a scan no or name to " \
212                      "retrieve from the scantable"
213                return
214            else:
215                raise RuntimeError("No scan given")
216
217        try:
218            bsel = self.get_selection()
219            sel = selector()
220            if type(scanid) is str:
221                sel.set_name(scanid)
222                self.set_selection(bsel+sel)
223                scopy = self._copy()
224                self.set_selection(bsel)
225                return scantable(scopy)
226            elif type(scanid) is int:
227                sel.set_scans([scanid])
228                self.set_selection(bsel+sel)
229                scopy = self._copy()
230                self.set_selection(bsel)
231                return scantable(scopy)
232            elif type(scanid) is list:
233                sel.set_scans(scanid)
234                self.set_selection(sel)
235                scopy = self._copy()
236                self.set_selection(bsel)
237                return scantable(scopy)
238            else:
239                msg = "Illegal scanid type, use 'int' or 'list' of ints."
240                if rcParams['verbose']:
241                    print msg
242                else:
243                    raise TypeError(msg)
244        except RuntimeError:
245            if rcParams['verbose']: print "Couldn't find any match."
246            else: raise
247
248    def __str__(self):
249        return Scantable._summary(self, True)
250
251    def summary(self, filename=None):
252        """
253        Print a summary of the contents of this scantable.
254        Parameters:
255            filename:    the name of a file to write the output to
256                         Default - no file output
257            verbose:     print extra info such as the frequency table
258                         The default (False) is taken from .asaprc
259        """
260        info = Scantable._summary(self, True)
261        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
262        if filename is not None:
263            if filename == "":
264                filename = 'scantable_summary.txt'
265            from os.path import expandvars, isdir
266            filename = expandvars(filename)
267            if not isdir(filename):
268                data = open(filename, 'w')
269                data.write(info)
270                data.close()
271            else:
272                msg = "Illegal file name '%s'." % (filename)
273                if rcParams['verbose']:
274                    print msg
275                else:
276                    raise IOError(msg)
277        if rcParams['verbose']:
278            try:
279                from IPython.genutils import page as pager
280            except ImportError:
281                from pydoc import pager
282            pager(info)
283        else:
284            return info
285
286    def get_spectrum(self, rowno):
287        """Return the spectrum for the specified row in the scantable as a list.
288        Parameters:
289             rowno:   the row number to retrieve the spectrum from       
290        """
291        return self._getspectrum(rowno)
292
293    def get_mask(self, rowno):
294        """Return the mask for the specified row in the scantable as a list.
295        Parameters:
296             rowno:   the row number to retrieve the mask from       
297        """
298        return self._getmask(rowno)
299
300    def set_spectrum(self, spec, rowno):
301        """Set the spectrum for the specified row in the scantable.
302        Parameters:
303             spec:   the spectrum
304             rowno:    the row number to set the spectrum for       
305        """
306        assert(len(spec) == self.nchan())
307        return self._setspectrum(spec, rowno)
308
309    def get_selection(self):
310        """
311        Get the selection object currently set on this scantable.
312        Parameters:
313            none
314        Example:
315            sel = scan.get_selection()
316            sel.set_ifs(0)              # select IF 0
317            scan.set_selection(sel)     # apply modified selection
318        """
319        return selector(self._getselection())
320
321    def set_selection(self, selection=selector()):
322        """
323        Select a subset of the data. All following operations on this scantable
324        are only applied to this selection.
325        Parameters:
326            selection:    a selector object (default unset the selection)
327        Examples:
328            sel = selector()         # create a selection object
329            sel.set_scans([0, 3])    # select SCANNO 0 and 3
330            scan.set_selection(sel)  # set the selection
331            scan.summary()           # will only print summary of scanno 0 and 3
332            scan.set_selection()     # unset the selection
333        """
334        self._setselection(selection)
335
336    def get_row(self, row=0, insitu=None):
337        """
338        Select a row in the scantable.
339        Return a scantable with single row.
340        Parameters:
341            row: row no of integration, default is 0.
342            insitu: if False a new scantable is returned.
343                    Otherwise, the row selection is done in-situ
344                    The default is taken from .asaprc (False)
345        """
346        if insitu is None: insitu = rcParams['insitu']
347        if not insitu:
348            workscan = self.copy()
349        else:
350            workscan = self
351        # Select a row
352        sel=selector()
353        sel.set_scans([workscan.getscan(row)])
354        sel.set_cycles([workscan.getcycle(row)])
355        sel.set_beams([workscan.getbeam(row)])
356        sel.set_ifs([workscan.getif(row)])
357        sel.set_polarisations([workscan.getpol(row)])
358        sel.set_name(workscan._getsourcename(row))
359        workscan.set_selection(sel)
360        if not workscan.nrow() == 1:
361            msg = "Could not identify single row. %d rows selected." % (workscan.nrow())
362            raise RuntimeError(msg)
363        del sel
364        if insitu:
365            self._assign(workscan)
366        else:
367            return workscan
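    # Illustrative use of get_row(); assumes 'scan' is an existing scantable:
    #
    #     row5 = scan.get_row(row=5, insitu=False)   # new scantable holding only row 5
    #     row5.summary()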
368
369    def stats(self, stat='stddev', mask=None):
370        """
371        Determine the specified statistic of the current beam/if/pol
372        Takes a 'mask' as an optional parameter to specify which
373        channels should be excluded.
374        Parameters:
375            stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
376                     'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
377            mask:    an optional mask specifying where the statistic
378                     should be determined.
379        Example:
380            scan.set_unit('channel')
381            msk = scan.create_mask([100, 200], [500, 600])
382            scan.stats(stat='mean', mask=msk)
383        """
384        if mask is None:
385            mask = []
386        axes = ['Beam', 'IF', 'Pol', 'Time']
387        if not self._check_ifs():
388            raise ValueError("Cannot apply mask as the IFs have different "
389                             "number of channels. Please use set_selection() "
390                             "to select individual IFs")
391        rtnabc = False
392        if stat.lower().endswith('_abc'): rtnabc = True
393        getchan = False
394        if stat.lower().startswith('min') or stat.lower().startswith('max'):
395            chan = self._math._minmaxchan(self, mask, stat)
396            getchan = True
397            statvals = []
398        if not rtnabc: statvals = self._math._stats(self, mask, stat)
399
400        out = ''
401        axes = []
402        for i in range(self.nrow()):
403            axis = []
404            axis.append(self.getscan(i))
405            axis.append(self.getbeam(i))
406            axis.append(self.getif(i))
407            axis.append(self.getpol(i))
408            axis.append(self.getcycle(i))
409            axes.append(axis)
410            tm = self._gettime(i)
411            src = self._getsourcename(i)
412            refstr = ''
413            statunit= ''
414            if getchan:
415                qx, qy = self.chan2data(rowno=i, chan=chan[i])
416                if rtnabc:
417                    statvals.append(qx['value'])
418                    refstr = '(value: %3.3f' % (qy['value'])+' ['+qy['unit']+'])'
419                    statunit= '['+qx['unit']+']'
420                else:
421                    refstr = '(@ %3.3f' % (qx['value'])+' ['+qx['unit']+'])'
422                    #statunit= ' ['+qy['unit']+']'
423            out += 'Scan[%d] (%s) ' % (axis[0], src)
424            out += 'Time[%s]:\n' % (tm)
425            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
426            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
427            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
428            out += '= %3.3f   ' % (statvals[i]) +refstr+'\n'
429            out +=  "--------------------------------------------------\n"
430
431        if rcParams['verbose']:
432            print "--------------------------------------------------"
433            print " ", stat, statunit
434            print "--------------------------------------------------"
435            print out
436        #else:
437            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
438            #           'axes' : axes,
439            #           'data': statvals}
440        return statvals
441
442    def chan2data(self, rowno=0, chan=0):
443        """
444        Returns channel/frequency/velocity and spectral value
445        at an arbitrary row and channel in the scantable.
446        Parameters:
447            rowno:   a row number in the scantable. Default is the
448                     first row, i.e. rowno=0
449            chan:    a channel in the scantable. Default is the first
450                     channel, i.e. chan=0
451        """
452        if isinstance(rowno, int) and isinstance(chan, int):
453            qx = {'unit': self.get_unit(),
454                  'value': self._getabcissa(rowno)[chan]}
455            qy = {'unit': self.get_fluxunit(),
456                  'value': self._getspectrum(rowno)[chan]}
457            return qx, qy
458
459    def stddev(self, mask=None):
460        """
461        Determine the standard deviation of the current beam/if/pol
462        Takes a 'mask' as an optional parameter to specify which
463        channels should be excluded.
464        Parameters:
465            mask:    an optional mask specifying where the standard
466                     deviation should be determined.
467
468        Example:
469            scan.set_unit('channel')
470            msk = scan.create_mask([100, 200], [500, 600])
471            scan.stddev(mask=msk)
472        """
473        return self.stats(stat='stddev', mask=mask)
474
475
476    def get_column_names(self):
477        """
478        Return a list of column names, which can be used for selection.
479        """
480        return list(Scantable.get_column_names(self))
481
482    def get_tsys(self):
483        """
484        Return the System temperatures.
485        Returns:
486            a list of Tsys values for the current selection
487        """
488
489        return self._row_callback(self._gettsys, "Tsys")
490
491    def _row_callback(self, callback, label):
492        axes = []
493        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
494        out = ""
495        outvec = []
496        for i in range(self.nrow()):
497            axis = []
498            axis.append(self.getscan(i))
499            axis.append(self.getbeam(i))
500            axis.append(self.getif(i))
501            axis.append(self.getpol(i))
502            axis.append(self.getcycle(i))
503            axes.append(axis)
504            tm = self._gettime(i)
505            src = self._getsourcename(i)
506            out += 'Scan[%d] (%s) ' % (axis[0], src)
507            out += 'Time[%s]:\n' % (tm)
508            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
509            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
510            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
511            outvec.append(callback(i))
512            out += '= %3.3f\n' % (outvec[i])
513            out +=  "--------------------------------------------------\n"
514        if rcParams['verbose']:
515            print "--------------------------------------------------"
516            print " %s" % (label)
517            print "--------------------------------------------------"
518            print out
519        # disabled because the vector seems more useful
520        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
521        return outvec
522
523    def _get_column(self, callback, row=-1):
524        """Return the value given by 'callback' for the specified row, or a
525        list of values over all rows when row == -1."""
526        if row == -1:
527            return [callback(i) for i in range(self.nrow())]
528        else:
529            if  0 <= row < self.nrow():
530                return callback(row)
531
532
533    def get_time(self, row=-1, asdatetime=False):
534        """
535        Get a list of time stamps for the observations.
536        Return a time stamp (a string, or a datetime object if asdatetime=True) for each integration in the scantable.
537        Parameters:
538            row:          row no of integration. Default -1 return all rows
539            asdatetime:   return values as datetime objects rather than strings
540        Example:
541            none
542        """
543        from time import strptime
544        from datetime import datetime
545        times = self._get_column(self._gettime, row)
546        if not asdatetime:
547            return times
548        format = "%Y/%m/%d/%H:%M:%S"
549        if isinstance(times, list):
550            return [datetime(*strptime(i, format)[:6]) for i in times]
551        else:
552            return datetime(*strptime(times, format)[:6])
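    # Illustrative use of get_time(); assumes 'scan' is an existing scantable:
    #
    #     stamps = scan.get_time()                     # list of 'YYYY/MM/DD/HH:MM:SS' strings
    #     dts = scan.get_time(asdatetime=True)         # list of datetime objects
    #     t0 = scan.get_time(row=0, asdatetime=True)   # a single row gives a single value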
553
554
555    def get_inttime(self, row=-1):
556        """
557        Get a list of integration times for the observations.
558        Return a time in seconds for each integration in the scantable.
559        Parameters:
560            row:    row no of integration. Default -1 return all rows.
561        Example:
562            none
563        """
564        return self._get_column(self._getinttime, row)       
565       
566
567    def get_sourcename(self, row=-1):
568        """
569        Get a list of source names for the observations.
570        Return a string for each integration in the scantable.
571        Parameters:
572            row:    row no of integration. Default -1 return all rows.
573        Example:
574            none
575        """
576        return self._get_column(self._getsourcename, row)
577
578    def get_elevation(self, row=-1):
579        """
580        Get a list of elevations for the observations.
581        Return a float for each integration in the scantable.
582        Parameters:
583            row:    row no of integration. Default -1 return all rows.
584        Example:
585            none
586        """
587        return self._get_column(self._getelevation, row)
588
589    def get_azimuth(self, row=-1):
590        """
591        Get a list of azimuths for the observations.
592        Return a float for each integration in the scantable.
593        Parameters:
594            row:    row no of integration. Default -1 return all rows.
595        Example:
596            none
597        """
598        return self._get_column(self._getazimuth, row)
599
600    def get_parangle(self, row=-1):
601        """
602        Get a list of parallactic angles for the observations.
603        Return a float for each integration in the scantable.
604        Parameters:
605            row:    row no of integration. Default -1 return all rows.
606        Example:
607            none
608        """
609        return self._get_column(self._getparangle, row)
610
611    def get_direction(self, row=-1):
612        """
613        Get a list of Positions on the sky (direction) for the observations.
614        Return a float for each integration in the scantable.
615        Parameters:
616            row:    row no of integration. Default -1 return all rows
617        Example:
618            none
619        """
620        return self._get_column(self._getdirection, row)
621
622    def get_directionval(self, row=-1):
623        """
624        Get a list of Positions on the sky (direction) for the observations.
625        Return a float for each integration in the scantable.
626        Parameters:
627            row:    row no of integration. Default -1 return all rows
628        Example:
629            none
630        """
631        return self._get_column(self._getdirectionvec, row)
632
633    def set_unit(self, unit='channel'):
634        """
635        Set the unit for all following operations on this scantable
636        Parameters:
637            unit:    optional unit, default is 'channel'
638                     one of '*Hz', 'km/s', 'channel', ''
639        """
640        varlist = vars()
641        if unit in ['', 'pixel', 'channel']:
642            unit = ''
643        inf = list(self._getcoordinfo())
644        inf[0] = unit
645        self._setcoordinfo(inf)
646        self._add_history("set_unit", varlist)
647
648    def set_instrument(self, instr):
649        """
650        Set the instrument for subsequent processing.
651        Parameters:
652            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
653                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
654        """
655        self._setInstrument(instr)
656        self._add_history("set_instrument", vars())
657        print_log()
658
659    def set_feedtype(self, feedtype):
660        """
661        Overwrite the feed type, which might not be set correctly.
662        Parameters:
663            feedtype:     'linear' or 'circular'
664        """
665        self._setfeedtype(feedtype)
666        self._add_history("set_feedtype", vars())
667        print_log()
668
669    def set_doppler(self, doppler='RADIO'):
670        """
671        Set the doppler for all following operations on this scantable.
672        Parameters:
673            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
674        """
675        varlist = vars()
676        inf = list(self._getcoordinfo())
677        inf[2] = doppler
678        self._setcoordinfo(inf)
679        self._add_history("set_doppler", vars())
680        print_log()
681
682    def set_freqframe(self, frame=None):
683        """
684        Set the frame type of the Spectral Axis.
685        Parameters:
686            frame:   an optional frame type, default 'LSRK'. Valid frames are:
687                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
688                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
689        Examples:
690            scan.set_freqframe('BARY')
691        """
692        if frame is None: frame = rcParams['scantable.freqframe']
693        varlist = vars()
694        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
695                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
696
697        if frame in valid:
698            inf = list(self._getcoordinfo())
699            inf[1] = frame
700            self._setcoordinfo(inf)
701            self._add_history("set_freqframe", varlist)
702        else:
703            msg = "Please specify a valid freq type. Valid types are:\n%s" % str(valid)
704            if rcParams['verbose']:
705                print msg
706            else:
707                raise TypeError(msg)
708        print_log()
709
710    def set_dirframe(self, frame=""):
711        """
712        Set the frame type of the Direction on the sky.
713        Parameters:
714            frame:   an optional frame type, default ''. Valid frames are:
715                     'J2000', 'B1950', 'GALACTIC'
716        Examples:
717            scan.set_dirframe('GALACTIC')
718        """
719        varlist = vars()
720        try:
721            Scantable.set_dirframe(self, frame)
722        except RuntimeError, msg:
723            if rcParams['verbose']:
724                print msg
725            else:
726                raise
727        self._add_history("set_dirframe", varlist)
728
729    def get_unit(self):
730        """
731        Get the default unit set in this scantable
732        Returns:
733            A unit string
734        """
735        inf = self._getcoordinfo()
736        unit = inf[0]
737        if unit == '': unit = 'channel'
738        return unit
739
740    def get_abcissa(self, rowno=0):
741        """
742        Get the abcissa in the current coordinate setup for the currently
743        selected Beam/IF/Pol
744        Parameters:
745            rowno:    an optional row number in the scantable. Default is the
746                      first row, i.e. rowno=0
747        Returns:
748            The abcissa values and the format string (as a dictionary)
749        """
750        abc = self._getabcissa(rowno)
751        lbl = self._getabcissalabel(rowno)
752        print_log()
753        return abc, lbl
754
755    def flag(self, mask=None, unflag=False):
756        """
757        Flag the selected data using an optional channel mask.
758        Parameters:
759            mask:   an optional channel mask, created with create_mask. Default
760                    (no mask) is all channels.
761            unflag:    if True, unflag the data
762        """
763        varlist = vars()
764        if mask is None:
765            mask = []
766        try:
767            self._flag(mask, unflag)
768        except RuntimeError, msg:
769            if rcParams['verbose']:
770                print msg
771                return
772            else: raise
773        self._add_history("flag", varlist)
774
775    def lag_flag(self, frequency, width=0.0, unit="GHz", insitu=None):
776        """
777        Flag the data in 'lag' space by providing a frequency to remove.
778        Flagged data in the scantable gets set to 0.0 before the fft.
779        No taper is applied.
780        Parameters:
781            frequency:    the frequency (really a period within the bandwidth)
782                          to remove
783            width:        the width of the frequency to remove, to remove a
784                          range of frequencies around the centre.
785            unit:         the frequency unit (default "GHz")
786        Notes:
787            It is recommended to flag edges of the band or strong
788            signals beforehand.
789        """
790        if insitu is None: insitu = rcParams['insitu']
791        self._math._setinsitu(insitu)
792        varlist = vars()
793        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1. }
794        if not base.has_key(unit):
795            raise ValueError("%s is not a valid unit." % unit)
796        try:
797            s = scantable(self._math._lag_flag(self, frequency*base[unit],
798                                               width*base[unit]))
799        except RuntimeError, msg:
800            if rcParams['verbose']:
801                print msg
802                return
803            else: raise
804        s._add_history("lag_flag", varlist)
805        print_log()
806        if insitu:
807            self._assign(s)
808        else:
809            return s
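    # Illustrative use of lag_flag(); assumes 'scan' shows a standing-wave ripple
    # with a period of roughly 1.2 MHz across the band:
    #
    #     cleaned = scan.lag_flag(frequency=1.2, width=0.1, unit="MHz", insitu=False)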
810
811
812    def create_mask(self, *args, **kwargs):
813        """
814        Compute and return a mask based on [min, max] windows.
815        The specified windows are to be INCLUDED when the mask is
816        applied.
817        Parameters:
818            [min, max], [min2, max2], ...
819                Pairs of start/end points (inclusive) specifying the regions
820                to be masked
821            invert:     optional argument. If specified as True,
822                        return an inverted mask, i.e. the regions
823                        specified are EXCLUDED
824            row:        create the mask using the specified row for
825                        unit conversions, default is row=0
826                        only necessary if frequency varies over rows.
827        Example:
828            scan.set_unit('channel')
829            a)
830            msk = scan.create_mask([400, 500], [800, 900])
831            # masks everything outside 400 and 500
832            # and 800 and 900 in the unit 'channel'
833
834            b)
835            msk = scan.create_mask([400, 500], [800, 900], invert=True)
836            # masks the regions between 400 and 500
837            # and 800 and 900 in the unit 'channel'
838            c)
839            # mask only channel 400
840            msk = scan.create_mask([400, 400])
841        """
842        row = 0
843        if kwargs.has_key("row"):
844            row = kwargs.get("row")
845        data = self._getabcissa(row)
846        u = self._getcoordinfo()[0]
847        if rcParams['verbose']:
848            if u == "": u = "channel"
849            msg = "The current mask window unit is %s" % u
850            i = self._check_ifs()
851            if not i:
852                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
853            asaplog.push(msg)
854        n = self.nchan()
855        msk = _n_bools(n, False)
856        # test if args is a 'list' or a 'normal' *args - UGLY!!!
857
858        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
859             and args or args[0]
860        for window in ws:
861            if (len(window) != 2 or window[0] > window[1] ):
862                raise TypeError("A window needs to be defined as [min, max]")
863            for i in range(n):
864                if data[i] >= window[0] and data[i] <= window[1]:
865                    msk[i] = True
866        if kwargs.has_key('invert'):
867            if kwargs.get('invert'):
868                msk = mask_not(msk)
869        print_log()
870        return msk
871
872    def get_masklist(self, mask=None, row=0):
873        """
874        Compute and return a list of mask windows, [min, max].
875        Parameters:
876            mask:       channel mask, created with create_mask.
877            row:        calculate the masklist using the specified row
878                        for unit conversions, default is row=0
879                        only necessary if frequency varies over rows.
880        Returns:
881            [min, max], [min2, max2], ...
882                Pairs of start/end points (inclusive) specifying
883                the masked regions
884        """
885        if not (isinstance(mask,list) or isinstance(mask, tuple)):
886            raise TypeError("The mask should be list or tuple.")
887        if len(mask) < 2:
888            raise TypeError("The mask should contain more than one element")
889        if self.nchan() != len(mask):
890            msg = "Number of channels in scantable != number of mask elements"
891            raise TypeError(msg)
892        data = self._getabcissa(row)
893        u = self._getcoordinfo()[0]
894        if rcParams['verbose']:
895            if u == "": u = "channel"
896            msg = "The current mask window unit is %s" % u
897            i = self._check_ifs()
898            if not i:
899                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
900            asaplog.push(msg)
901        masklist = []
902        ist, ien = None, None
903        ist, ien = self.get_mask_indices(mask)
904        if ist is not None and ien is not None:
905            for i in xrange(len(ist)):
906                limits = [data[ist[i]], data[ien[i]]]  # window edges in the current unit
907                limits.sort()
908                masklist.append([limits[0], limits[1]])
909        return masklist
910
911    def get_mask_indices(self, mask=None):
912        """
913        Compute and return lists of mask start indices and mask end indices.
914        Parameters:
915            mask:       channel mask, created with create_mask.
916        Returns:
917            List of mask start indices and that of mask end indices,
918            i.e., [istart1,istart2,....], [iend1,iend2,....].
919        """
920        if not (isinstance(mask,list) or isinstance(mask, tuple)):
921            raise TypeError("The mask should be list or tuple.")
922        if len(mask) < 2:
923            raise TypeError("The mask should contain more than one element")
924        istart=[]
925        iend=[]
926        if mask[0]: istart.append(0)
927        for i in range(len(mask)-1):
928            if not mask[i] and mask[i+1]:
929                istart.append(i+1)
930            elif mask[i] and not mask[i+1]:
931                iend.append(i)
932        if mask[len(mask)-1]: iend.append(len(mask)-1)
933        if len(istart) != len(iend):
934            raise RuntimeError("Numbers of mask start != mask end.")
935        for i in range(len(istart)):
936            if istart[i] > iend[i]:
937                raise RuntimeError("Mask start index > mask end index")
938                break
939        return istart,iend
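    # Illustrative round trip between a mask and its windows; assumes 'scan' is an
    # existing scantable with scan.set_unit('channel') in effect:
    #
    #     msk = scan.create_mask([100, 200], [500, 600])   # boolean channel mask
    #     scan.get_masklist(msk)      # -> e.g. [[100.0, 200.0], [500.0, 600.0]]
    #     scan.get_mask_indices(msk)  # -> ([100, 500], [200, 600])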
940
941#    def get_restfreqs(self):
942#        """
943#        Get the restfrequency(s) stored in this scantable.
944#        The return value(s) are always of unit 'Hz'
945#        Parameters:
946#            none
947#        Returns:
948#            a list of doubles
949#        """
950#        return list(self._getrestfreqs())
951
952    def get_restfreqs(self, ids=None):
953        """
954        Get the restfrequency(s) stored in this scantable.
955        The return value(s) are always of unit 'Hz'
956        Parameters:
957            ids: (optional) a list of MOLECULE_IDs for which the
958                 restfrequency(s) are to be retrieved
959        Returns:
960            dictionary containing ids and a list of doubles for each id
961        """
962        if ids is None:
963            rfreqs={}
964            idlist = self.getmolnos()
965            for i in idlist:
966                rfreqs[i]=list(self._getrestfreqs(i))
967            return rfreqs
968        else:
969            if type(ids)==list or type(ids)==tuple:
970                rfreqs={}
971                for i in ids:
972                    rfreqs[i]=list(self._getrestfreqs(i))
973                return rfreqs
974            else:
975                return list(self._getrestfreqs(ids))
976            #return list(self._getrestfreqs(ids))
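    # Illustrative use of get_restfreqs(); assumes 'scan' is an existing scantable:
    #
    #     scan.get_restfreqs()           # dict of {MOLECULE_ID: [rest frequencies in Hz]}
    #     scan.get_restfreqs(ids=[0])    # restrict the dictionary to MOLECULE_ID 0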
977
978    def set_restfreqs(self, freqs=None, unit='Hz'):
979        """
980        ********NEED TO BE UPDATED begin************
981        Set or replace the restfrequency specified.
982        If the 'freqs' argument holds a scalar,
983        then that rest frequency will be applied to all the selected
984        data.  If the 'freqs' argument holds
985        a vector, then it MUST be of equal or smaller length than
986        the number of IFs (and the available restfrequencies will be
987        replaced by this vector).  In this case, *all* data have
988        the restfrequency set per IF according
989        to the corresponding value you give in the 'freqs' vector.
990        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
991        IF 1 gets restfreq 2e9.
992        ********NEED TO BE UPDATED end************
993        You can also specify the frequencies via a linecatalog.
994
995        Parameters:
996            freqs:   list of rest frequency values or string identifiers
997            unit:    unit for rest frequency (default 'Hz')
998
999        Example:
1000            # set the given restfrequency for the all currently selected IFs
1001            scan.set_restfreqs(freqs=1.4e9)
1002            # set multiple restfrequencies to all the selected data
1003            scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
1004            # If the number of IFs in the data is >= 2, IF0 gets the first
1005            # value, IF1 the second, ... NOTE that freqs needs to be
1006            # specified in list of list (e.g. [[],[],...] ).
1007            scan.set_restfreqs(freqs=[[1.4e9],[1.67e9]])
1008            #set the given restfrequency for the whole table (by name)
1009            scan.set_restfreqs(freqs="OH1667")
1010
1011        Note:
1012            To do more sophisticated restfrequency setting, e.g. on a
1013            source and IF basis, use scantable.set_selection() before using
1014            this function.
1015            # provided your scantable is called scan
1016            selection = selector()
1017            selection.set_name("ORION*")
1018            selection.set_ifs([1])
1019            scan.set_selection(selection)
1020            scan.set_restfreqs(freqs=86.6e9)
1021
1022        """
1023        varlist = vars()
1024        from asap import linecatalog
1025        # simple  value
1026        if isinstance(freqs, int) or isinstance(freqs, float):
1027            # TT mod
1028            #self._setrestfreqs(freqs, "",unit)
1029            self._setrestfreqs([freqs], [""],unit)
1030        # list of values
1031        elif isinstance(freqs, list) or isinstance(freqs, tuple):
1032            # list values are scalars
1033            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
1034                self._setrestfreqs(freqs, [""],unit)
1035            # list values are tuples, (value, name)
1036            elif isinstance(freqs[-1], dict):
1037                #sel = selector()
1038                #savesel = self._getselection()
1039                #iflist = self.getifnos()
1040                #for i in xrange(len(freqs)):
1041                #    sel.set_ifs(iflist[i])
1042                #    self._setselection(sel)
1043                #    self._setrestfreqs(freqs[i], "",unit)
1044                #self._setselection(savesel)
1045                self._setrestfreqs(freqs["value"],
1046                                   freqs["name"], "MHz")
1047            elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
1048                sel = selector()
1049                savesel = self._getselection()
1050                iflist = self.getifnos()
1051                if len(freqs)>len(iflist):
1052                    raise ValueError("number of elements in list of list exceeds the current IF selections")
1053                for i in xrange(len(freqs)):
1054                    sel.set_ifs(iflist[i])
1055                    self._setselection(sel)
1056                    self._setrestfreqs(freqs[i]["value"],
1057                                       freqs[i]["name"], "MHz")
1058                self._setselection(savesel)
1059        # freqs are to be taken from a linecatalog
1060        elif isinstance(freqs, linecatalog):
1061            sel = selector()
1062            savesel = self._getselection()
1063            for i in xrange(freqs.nrow()):
1064                sel.set_ifs(self.getifnos()[i])
1065                self._setselection(sel)
1066                self._setrestfreqs(freqs.get_frequency(i),
1067                                   freqs.get_name(i), "MHz")
1068                # ensure that we are not iterating past nIF
1069                if i == self.nif()-1: break
1070            self._setselection(savesel)
1071        else:
1072            return
1073        self._add_history("set_restfreqs", varlist)
1074
1075    def shift_refpix(self, delta):
1076        """
1077        Shift the reference pixel of the Spectral Coordinate by an
1078        integer amount.
1079        Parameters:
1080            delta:   the amount to shift by
1081        Note:
1082            Be careful using this with broadband data.
1083        """
1084        Scantable.shift(self, delta)
1085
1086    def history(self, filename=None):
1087        """
1088        Print the history. Optionally to a file.
1089        Parameters:
1090            filename:    The name of the file to save the history to.
1091        """
1092        hist = list(self._gethistory())
1093        out = "-"*80
1094        for h in hist:
1095            if h.startswith("---"):
1096                out += "\n"+h
1097            else:
1098                items = h.split("##")
1099                date = items[0]
1100                func = items[1]
1101                items = items[2:]
1102                out += "\n"+date+"\n"
1103                out += "Function: %s\n  Parameters:" % (func)
1104                for i in items:
1105                    s = i.split("=")
1106                    out += "\n   %s = %s" % (s[0], s[1])
1107                out += "\n"+"-"*80
1108        if filename is not None:
1109            if filename is "":
1110                filename = 'scantable_history.txt'
1111            import os
1112            filename = os.path.expandvars(os.path.expanduser(filename))
1113            if not os.path.isdir(filename):
1114                data = open(filename, 'w')
1115                data.write(out)
1116                data.close()
1117            else:
1118                msg = "Illegal file name '%s'." % (filename)
1119                if rcParams['verbose']:
1120                    print msg
1121                else:
1122                    raise IOError(msg)
1123        if rcParams['verbose']:
1124            try:
1125                from IPython.genutils import page as pager
1126            except ImportError:
1127                from pydoc import pager
1128            pager(out)
1129        else:
1130            return out
1131        return
1132    #
1133    # Maths business
1134    #
1135
1136    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
1137        """
1138        Return the (time) weighted average of a scan.
1139        Note:
1140            in channels only - align if necessary
1141        Parameters:
1142            mask:     an optional mask (only used for 'var' and 'tsys'
1143                      weighting)
1144            scanav:   True averages each scan separately
1145                      False (default) averages all scans together,
1146            weight:   Weighting scheme.
1147                      'none'     (mean, no weight)
1148                      'var'      (1/var(spec) weighted)
1149                      'tsys'     (1/Tsys**2 weighted)
1150                      'tint'     (integration time weighted)
1151                      'tintsys'  (Tint/Tsys**2)
1152                      'median'   (median averaging)
1153                      The default is 'tint'
1154            align:    align the spectra in velocity before averaging. It takes
1155                      the time of the first spectrum as reference time.
1156        Example:
1157            # time average the scantable without using a mask
1158            newscan = scan.average_time()
1159        """
1160        varlist = vars()
1161        if weight is None: weight = 'TINT'
1162        if mask is None: mask = ()
1163        if scanav: scanav = "SCAN"
1164        else: scanav = "NONE"
1165        scan = (self, )
1166        try:
1167            if align:
1168                scan = (self.freq_align(insitu=False), )
1169            s = None
1170            if weight.upper() == 'MEDIAN':
1171                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1172                                                         scanav))
1173            else:
1174                s = scantable(self._math._average(scan, mask, weight.upper(),
1175                              scanav))
1176        except RuntimeError, msg:
1177            if rcParams['verbose']:
1178                print msg
1179                return
1180            else: raise
1181        s._add_history("average_time", varlist)
1182        print_log()
1183        return s
1184
1185    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1186        """
1187        Return a scan where all spectra are converted to either
1188        Jansky or Kelvin depending upon the flux units of the scan table.
1189        By default the function tries to look the values up internally.
1190        If it can't find them (or if you want to override), you must
1191        specify EITHER jyperk OR eta (and D which it will try to look up
1192        also if you don't set it). jyperk takes precedence if you set both.
1193        Parameters:
1194            jyperk:      the Jy / K conversion factor
1195            eta:         the aperture efficiency
1196            d:           the geometric diameter (metres)
1197            insitu:      if False a new scantable is returned.
1198                         Otherwise, the scaling is done in-situ
1199                         The default is taken from .asaprc (False)
1200        """
1201        if insitu is None: insitu = rcParams['insitu']
1202        self._math._setinsitu(insitu)
1203        varlist = vars()
1204        if jyperk is None: jyperk = -1.0
1205        if d is None: d = -1.0
1206        if eta is None: eta = -1.0
1207        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1208        s._add_history("convert_flux", varlist)
1209        print_log()
1210        if insitu: self._assign(s)
1211        else: return s
1212
1213    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1214        """
1215        Return a scan after applying a gain-elevation correction.
1216        The correction can be made via either a polynomial or a
1217        table-based interpolation (and extrapolation if necessary).
1218        You specify polynomial coefficients, an ascii table or neither.
1219        If you specify neither, then a polynomial correction will be made
1220        with built-in coefficients known for certain telescopes (an error
1221        will occur if the instrument is not known).
1222        The data and Tsys are *divided* by the scaling factors.
1223        Parameters:
1224            poly:        Polynomial coefficients (default None) to compute a
1225                         gain-elevation correction as a function of
1226                         elevation (in degrees).
1227            filename:    The name of an ascii file holding correction factors.
1228                         The first row of the ascii file must give the column
1229                         names and these MUST include columns
1230                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1231                         by this) somewhere.
1232                         The second row must give the data type of the
1233                         column. Use 'R' for Real and 'I' for Integer.
1234                         An example file would be
1235                         (actual factors are arbitrary) :
1236
1237                         TIME ELEVATION FACTOR
1238                         R R R
1239                         0.1 0 0.8
1240                         0.2 20 0.85
1241                         0.3 40 0.9
1242                         0.4 60 0.85
1243                         0.5 80 0.8
1244                         0.6 90 0.75
1245            method:      Interpolation method when correcting from a table.
1246                         Values are  "nearest", "linear" (default), "cubic"
1247                         and "spline"
1248            insitu:      if False a new scantable is returned.
1249                         Otherwise, the scaling is done in-situ
1250                         The default is taken from .asaprc (False)
1251        """
1252
1253        if insitu is None: insitu = rcParams['insitu']
1254        self._math._setinsitu(insitu)
1255        varlist = vars()
1256        if poly is None:
1257            poly = ()
1258        from os.path import expandvars
1259        filename = expandvars(filename)
1260        s = scantable(self._math._gainel(self, poly, filename, method))
1261        s._add_history("gain_el", varlist)
1262        print_log()
1263        if insitu: self._assign(s)
1264        else: return s
1265
1266    def freq_align(self, reftime=None, method='cubic', insitu=None):
1267        """
1268        Return a scan where all rows have been aligned in frequency/velocity.
1269        The alignment frequency frame (e.g. LSRK) is that set by function
1270        set_freqframe.
1271        Parameters:
1272            reftime:     reference time to align at. By default, the time of
1273                         the first row of data is used.
1274            method:      Interpolation method for regridding the spectra.
1275                         Choose from "nearest", "linear", "cubic" (default)
1276                         and "spline"
1277            insitu:      if False a new scantable is returned.
1278                         Otherwise, the scaling is done in-situ
1279                         The default is taken from .asaprc (False)
1280        """
1281        if insitu is None: insitu = rcParams["insitu"]
1282        self._math._setinsitu(insitu)
1283        varlist = vars()
1284        if reftime is None: reftime = ""
1285        s = scantable(self._math._freq_align(self, reftime, method))
1286        s._add_history("freq_align", varlist)
1287        print_log()
1288        if insitu: self._assign(s)
1289        else: return s
1290
1291    def opacity(self, tau, insitu=None):
1292        """
1293        Apply an opacity correction. The data
1294        and Tsys are multiplied by the correction factor.
1295        Parameters:
1296            tau:         Opacity from which the correction factor is
1297                         exp(tau*ZD)
1298                         where ZD is the zenith-distance
1299            insitu:      if False a new scantable is returned.
1300                         Otherwise, the scaling is done in-situ
1301                         The default is taken from .asaprc (False)
1302        """
1303        if insitu is None: insitu = rcParams['insitu']
1304        self._math._setinsitu(insitu)
1305        varlist = vars()
1306        s = scantable(self._math._opacity(self, tau))
1307        s._add_history("opacity", varlist)
1308        print_log()
1309        if insitu: self._assign(s)
1310        else: return s
1311
1312    def bin(self, width=5, insitu=None):
1313        """
1314        Return a scan where all spectra have been binned up.
1315        Parameters:
1316            width:       The bin width (default=5) in pixels
1317            insitu:      if False a new scantable is returned.
1318                         Otherwise, the scaling is done in-situ
1319                         The default is taken from .asaprc (False)
1320        """
1321        if insitu is None: insitu = rcParams['insitu']
1322        self._math._setinsitu(insitu)
1323        varlist = vars()
1324        s = scantable(self._math._bin(self, width))
1325        s._add_history("bin", varlist)
1326        print_log()
1327        if insitu: self._assign(s)
1328        else: return s
1329
1330
1331    def resample(self, width=5, method='cubic', insitu=None):
1332        """
1333        Return a scan where all spectra have been resampled (binned up using interpolation).
1334       
1335        Parameters:
1336            width:       The bin width (default=5) in pixels
1337            method:      Interpolation method used for the resampling.
1338                         Values are "nearest", "linear", "cubic" (default)
1339                         and "spline"
1340            insitu:      if False a new scantable is returned.
1341                         Otherwise, the scaling is done in-situ
1342                         The default is taken from .asaprc (False)
1343        """
1344        if insitu is None: insitu = rcParams['insitu']
1345        self._math._setinsitu(insitu)
1346        varlist = vars()
1347        s = scantable(self._math._resample(self, method, width))
1348        s._add_history("resample", varlist)
1349        print_log()
1350        if insitu: self._assign(s)
1351        else: return s
1352
1353
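    # Usage sketch (illustration only; the width value is hypothetical and
    # the helper below is not part of the original ASAP code).
    def _example_resample(self):
        # resample with a bin width of 4 pixels using spline interpolation
        return self.resample(width=4, method='spline', insitu=False)
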
1354    def average_pol(self, mask=None, weight='none'):
1355        """
1356        Average the Polarisations together.
1357        Parameters:
1358            mask:        An optional mask defining the region where the
1359                         averaging will be applied. The output will have all
1360                         specified points masked.
1361            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1362                         weighted), or 'tsys' (1/Tsys**2 weighted)
1363        """
1364        varlist = vars()
1365        if mask is None:
1366            mask = ()
1367        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1368        s._add_history("average_pol", varlist)
1369        print_log()
1370        return s
1371
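    # Usage sketch (illustration only; not part of the original ASAP code).
    def _example_average_pol(self):
        # average the polarisations together with 1/Tsys**2 weighting
        return self.average_pol(weight='tsys')
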
1372    def average_beam(self, mask=None, weight='none'):
1373        """
1374        Average the Beams together.
1375        Parameters:
1376            mask:        An optional mask defining the region where the
1377                         averaging will be applied. The output will have all
1378                         specified points masked.
1379            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1380                         weighted), or 'tsys' (1/Tsys**2 weighted)
1381        """
1382        varlist = vars()
1383        if mask is None:
1384            mask = ()
1385        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1386        s._add_history("average_beam", varlist)
1387        print_log()
1388        return s
1389
1390    def convert_pol(self, poltype=None):
1391        """
1392        Convert the data to a different polarisation type.
1393        Parameters:
1394            poltype:    The new polarisation type. Valid types are:
1395                        "linear", "stokes" and "circular"
1396        """
1397        varlist = vars()
1398        try:
1399            s = scantable(self._math._convertpol(self, poltype))
1400        except RuntimeError, msg:
1401            if rcParams['verbose']:
1402                print msg
1403                return
1404            else:
1405                raise
1406        s._add_history("convert_pol", varlist)
1407        print_log()
1408        return s
1409
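    # Usage sketch (illustration only; not part of the original ASAP code).
    def _example_convert_pol(self):
        # convert linear polarisation data to Stokes parameters
        return self.convert_pol("stokes")
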
1410    def smooth(self, kernel="hanning", width=5.0, insitu=None):
1411        """
1412        Smooth the spectrum by the specified kernel (conserving flux).
1413        Parameters:
1414            kernel:     The type of smoothing kernel. Select from
1415                        'hanning' (default), 'gaussian', 'boxcar' and
1416                        'rmedian'
1417            width:      The width of the kernel in pixels. For 'hanning' this is
1418                        ignored, otherwise it defaults to 5 pixels.
1419                        For 'gaussian' it is the Full Width Half
1420                        Maximum. For 'boxcar' it is the full width.
1421                        For 'rmedian' it is the half width.
1422            insitu:     if False a new scantable is returned.
1423                        Otherwise, the scaling is done in-situ
1424                        The default is taken from .asaprc (False)
1425        Example:
1426             see the usage sketch below
1427        """
1428        if insitu is None: insitu = rcParams['insitu']
1429        self._math._setinsitu(insitu)
1430        varlist = vars()
1431        s = scantable(self._math._smooth(self, kernel.lower(), width))
1432        s._add_history("smooth", varlist)
1433        print_log()
1434        if insitu: self._assign(s)
1435        else: return s
1436
1437
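    # Usage sketch (illustration only; the kernel width is hypothetical and
    # the helper below is not part of the original ASAP interface).
    def _example_smooth(self):
        # smooth with a gaussian kernel of 10 pixels FWHM, leaving self intact
        return self.smooth(kernel='gaussian', width=10.0, insitu=False)
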
1438    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1439        """
1440        Return a scan which has been baselined (all rows) by a polynomial.
1441        Parameters:
1442            mask:       an optional mask
1443            order:      the order of the polynomial (default is 0)
1444            plot:       plot the fit and the residual. In this mode each
1445                        individual fit has to be approved by typing 'y'
1446                        or 'n'
1447            uselin:     use linear polynomial fit
1448            insitu:     if False a new scantable is returned.
1449                        Otherwise, the scaling is done in-situ
1450                        The default is taken from .asaprc (False)
1451        Example:
1452            # return a scan baselined by a third order polynomial,
1453            # not using a mask
1454            bscan = scan.poly_baseline(order=3)
1455        """
1456        if insitu is None: insitu = rcParams['insitu']
1457        varlist = vars()
1458        if mask is None:
1459            mask = [True for i in xrange(self.nchan(-1))]
1460        from asap.asapfitter import fitter
1461        try:
1462            f = fitter()
1463            f.set_scan(self, mask)
1464            if uselin:
1465                f.set_function(lpoly=order)
1466            else:
1467                f.set_function(poly=order)
1468            s = f.auto_fit(insitu, plot=plot)
1469            # Save parameters of baseline fits as a class attribute.
1470            # NOTICE: It does not reflect changes in scantable!
1471            self.blpars = f.blpars
1472            s._add_history("poly_baseline", varlist)
1473            print_log()
1474            if insitu: self._assign(s)
1475            else: return s
1476        except RuntimeError:
1477            msg = "The fit failed, possibly because it didn't converge."
1478            if rcParams['verbose']:
1479                print msg
1480                return
1481            else:
1482                raise RuntimeError(msg)
1483
1484
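    # Usage sketch (illustration only; the channel ranges are hypothetical,
    # create_mask() is provided elsewhere in this class, and the helper below
    # is not part of the original ASAP interface).
    def _example_poly_baseline(self):
        # exclude the (assumed) line region from the fit, then remove a
        # 3rd order polynomial baseline and return the result
        msk = self.create_mask([0, 300], [700, 1023])
        return self.poly_baseline(mask=msk, order=3, insitu=False)
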
1485    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1486                           threshold=3, chan_avg_limit=1, plot=False,
1487                           insitu=None):
1488        """
1489        Return a scan which has been baselined (all rows) by a polynomial.
1490        Spectral lines are detected first using linefinder and masked out
1491        to avoid them affecting the baseline solution.
1492
1493        Parameters:
1494            mask:       an optional mask retrieved from scantable
1495            edge:       an optional number of channels to drop at
1496                        the edge of the spectrum. If only one value is
1497                        specified, the same number will be dropped from
1498                        both sides of the spectrum. Default is to keep
1499                        all channels. Nested tuples represent individual
1500                        edge selections for different IFs (the number of
1501                        spectral channels can differ between IFs)
1502            order:      the order of the polynomial (default is 0)
1503            threshold:  the threshold used by line finder. It is better to
1504                        keep it large as only strong lines affect the
1505                        baseline solution.
1506            chan_avg_limit:
1507                        a maximum number of consecutive spectral channels to
1508                        average during the search of weak and broad lines.
1509                        The default is no averaging (and no search for weak
1510                        lines). If such lines can affect the fitted baseline
1511                        (e.g. a high order polynomial is fitted), increase this
1512                        parameter (usually values up to 8 are reasonable). Most
1513                        users of this method should find the default value
1514                        sufficient.
1515            plot:       plot the fit and the residual. In this mode each
1516                        individual fit has to be approved by typing 'y'
1517                        or 'n'
1518            insitu:     if False a new scantable is returned.
1519                        Otherwise, the scaling is done in-situ
1520                        The default is taken from .asaprc (False)
1521
1522        Example:
1523            scan2=scan.auto_poly_baseline(order=7)
1524        """
1525        if insitu is None: insitu = rcParams['insitu']
1526        varlist = vars()
1527        from asap.asapfitter import fitter
1528        from asap.asaplinefind import linefinder
1529        from asap import _is_sequence_or_number as _is_valid
1530
1531        # check whether edge is set up for each IF individually
1532        individualedge = False;
1533        if len(edge) > 1:
1534            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1535                individualedge = True;
1536
1537        if not _is_valid(edge, int) and not individualedge:
1538            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1539                              "pair of integers specified as a tuple. Nested tuples " \
1540                              "are allowed to make individual selections for different IFs."
1541
1542        curedge = (0, 0)
1543        if individualedge:
1544            for edgepar in edge:
1545                if not _is_valid(edgepar, int):
1546                    raise ValueError, "Each element of the 'edge' tuple has " \
1547                                      "to be a pair of integers or an integer."
1548        else:
1549            curedge = edge;
1550
1551        # setup fitter
1552        f = fitter()
1553        f.set_function(poly=order)
1554
1555        # setup line finder
1556        fl = linefinder()
1557        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1558
1559        if not insitu:
1560            workscan = self.copy()
1561        else:
1562            workscan = self
1563
1564        fl.set_scan(workscan)
1565
1566        rows = range(workscan.nrow())
1567        # Save parameters of baseline fits & masklists as a class attribute.
1568        # NOTICE: It does not reflect changes in scantable!
1569        if len(rows) > 0:
1570            self.blpars=[]
1571            self.masklists=[]
1572        asaplog.push("Processing:")
1573        for r in rows:
1574            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1575                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1576                 workscan.getpol(r), workscan.getcycle(r))
1577            asaplog.push(msg, False)
1578
1579            # figure out edge parameter
1580            if individualedge:
1581                if len(edge) <= workscan.getif(r):
1582                    raise RuntimeError, "Number of edge elements appears to " \
1583                                        "be less than the number of IFs"
1584                curedge = edge[workscan.getif(r)]
1585
1586            # setup line finder
1587            fl.find_lines(r, mask, curedge)
1588            outmask=fl.get_mask()
1589            f.set_scan(workscan, fl.get_mask())
1590            f.x = workscan._getabcissa(r)
1591            f.y = workscan._getspectrum(r)
1592            f.data = None
1593            f.fit()
1594           
1595            # Show mask list
1596            masklist=workscan.get_masklist(fl.get_mask(),row=r)
1597            msg = "mask range: "+str(masklist)
1598            asaplog.push(msg, False)
1599
1600            fpar = f.get_parameters()
1601            if plot:
1602                f.plot(residual=True)
1603                x = raw_input("Accept fit ( [y]/n ): ")
1604                if x.upper() == 'N':
1605                    self.blpars.append(None)
1606                    self.masklists.append(None)
1607                    continue
1608            workscan._setspectrum(f.fitter.getresidual(), r)
1609            self.blpars.append(fpar)
1610            self.masklists.append(masklist)
1611        if plot:
1612            f._p.unmap()
1613            f._p = None
1614        workscan._add_history("auto_poly_baseline", varlist)
1615        if insitu:
1616            self._assign(workscan)
1617        else:
1618            return workscan
1619
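    # Usage sketch (illustration only; parameter values are hypothetical and
    # the helper below is not part of the original ASAP interface).
    def _example_auto_poly_baseline(self):
        # drop 30 edge channels on both sides, let the line finder mask any
        # lines and remove a 2nd order baseline from every row
        return self.auto_poly_baseline(edge=(30, 30), order=2, threshold=5,
                                       insitu=False)
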
1620    def rotate_linpolphase(self, angle):
1621        """
1622        Rotate the phase of the complex polarization O=Q+iU correlation.
1623        This is always done in situ in the raw data.  So if you call this
1624        function more than once then each call rotates the phase further.
1625        Parameters:
1626            angle:   The angle (degrees) to rotate (add) by.
1627        Examples:
1628            scan.rotate_linpolphase(2.3)
1629        """
1630        varlist = vars()
1631        self._math._rotate_linpolphase(self, angle)
1632        self._add_history("rotate_linpolphase", varlist)
1633        print_log()
1634        return
1635
1636
1637    def rotate_xyphase(self, angle):
1638        """
1639        Rotate the phase of the XY correlation.  This is always done in situ
1640        in the data.  So if you call this function more than once
1641        then each call rotates the phase further.
1642        Parameters:
1643            angle:   The angle (degrees) to rotate (add) by.
1644        Examples:
1645            scan.rotate_xyphase(2.3)
1646        """
1647        varlist = vars()
1648        self._math._rotate_xyphase(self, angle)
1649        self._add_history("rotate_xyphase", varlist)
1650        print_log()
1651        return
1652
1653    def swap_linears(self):
1654        """
1655        Swap the linear polarisations XX and YY, or more generally the first
1656        two polarisations, as this also works for circulars.
1657        """
1658        varlist = vars()
1659        self._math._swap_linears(self)
1660        self._add_history("swap_linears", varlist)
1661        print_log()
1662        return
1663
1664    def invert_phase(self):
1665        """
1666        Invert the phase of the complex polarisation
1667        """
1668        varlist = vars()
1669        self._math._invert_phase(self)
1670        self._add_history("invert_phase", varlist)
1671        print_log()
1672        return
1673
1674    def add(self, offset, insitu=None):
1675        """
1676        Return a scan where all spectra have the offset added
1677        Parameters:
1678            offset:      the offset
1679            insitu:      if False a new scantable is returned.
1680                         Otherwise, the scaling is done in-situ
1681                         The default is taken from .asaprc (False)
1682        """
1683        if insitu is None: insitu = rcParams['insitu']
1684        self._math._setinsitu(insitu)
1685        varlist = vars()
1686        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1687        s._add_history("add", varlist)
1688        print_log()
1689        if insitu:
1690            self._assign(s)
1691        else:
1692            return s
1693
1694    def scale(self, factor, tsys=True, insitu=None):
1695        """
1696        Return a scan where all spectra are scaled by the given 'factor'
1697        Parameters:
1698            factor:      the scaling factor
1699            insitu:      if False a new scantable is returned.
1700                         Otherwise, the scaling is done in-situ
1701                         The default is taken from .asaprc (False)
1702            tsys:        if True (default) then apply the operation to Tsys
1703                         as well as the data
1704        """
1705        if insitu is None: insitu = rcParams['insitu']
1706        self._math._setinsitu(insitu)
1707        varlist = vars()
1708        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1709        s._add_history("scale", varlist)
1710        print_log()
1711        if insitu:
1712            self._assign(s)
1713        else:
1714            return s
1715
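    # Usage sketch (illustration only; the factor and offset are hypothetical
    # and the helper below is not part of the original ASAP interface).
    def _example_scale_and_add(self):
        # scale data and Tsys by 1.25, then add a constant 0.1 to the data
        s = self.scale(1.25, tsys=True, insitu=False)
        return s.add(0.1, insitu=False)
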
1716    def set_sourcetype(self, match, matchtype="pattern",
1717                       sourcetype="reference"):
1718        """
1719        Set the type of the source to be a source or reference scan
1720        using the provided pattern.
1721        Parameters:
1722            match:          a Unix style pattern, regular expression or selector
1723            matchtype:      'pattern' (default) UNIX style pattern or
1724                            'regex' regular expression
1725            sourcetype:     the type of the source to use (source/reference)
1726        """
1727        varlist = vars()
1728        basesel = self.get_selection()
1729        stype = -1
1730        if sourcetype.lower().startswith("r"):
1731            stype = 1
1732        elif sourcetype.lower().startswith("s"):
1733            stype = 0
1734        else:
1735            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1736        if matchtype.lower().startswith("p"):
1737            matchtype = "pattern"
1738        elif matchtype.lower().startswith("r"):
1739            matchtype = "regex"
1740        else:
1741            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1742        sel = selector()
1743        if isinstance(match, selector):
1744            sel = match
1745        else:
1746            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1747        self.set_selection(basesel+sel)
1748        self._setsourcetype(stype)
1749        self.set_selection(basesel)
1750        self._add_history("set_sourcetype", varlist)
1751
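    # Usage sketch (illustration only; the '*_R' naming convention is an
    # assumption and the helper below is not part of the original ASAP code).
    def _example_set_sourcetype(self):
        # mark every scan whose source name ends in '_R' as a reference scan
        self.set_sourcetype("*_R", matchtype="pattern",
                            sourcetype="reference")
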
1752    def auto_quotient(self, preserve=True, mode='paired'):
1753        """
1754        This function allows one to build quotients automatically.
1755        It assumes the observation to have the same number of
1756        "ons" and "offs"
1757        Parameters:
1758            preserve:       you can preserve (default) the continuum or
1759                            remove it.  The equations used are
1760                            preserve: Output = Toff * (on/off) - Toff
1761                            remove:   Output = Toff * (on/off) - Ton
1762            mode:           the on/off detection mode
1763                            'paired' (default)
1764                            identifies 'off' scans by the
1765                            trailing '_R' (Mopra/Parkes) or
1766                            '_e'/'_w' (Tid) and matches
1767                            on/off pairs from the observing pattern
1768                            'time'
1769                            finds the closest off in time
1770
1771        """
1772        modes = ["time", "paired"]
1773        if not mode in modes:
1774            msg = "please provide valid mode. Valid modes are %s" % (modes)
1775            raise ValueError(msg)
1776        varlist = vars()
1777        s = None
1778        if mode.lower() == "paired":
1779            basesel = self.get_selection()
1780            sel = selector()+basesel
1781            sel.set_query("SRCTYPE==1")
1782            self.set_selection(sel)
1783            offs = self.copy()
1784            sel.set_query("SRCTYPE==0")
1785            self.set_selection(sel)
1786            ons = self.copy()
1787            s = scantable(self._math._quotient(ons, offs, preserve))
1788            self.set_selection(basesel)
1789        elif mode.lower() == "time":
1790            s = scantable(self._math._auto_quotient(self, mode, preserve))
1791        s._add_history("auto_quotient", varlist)
1792        print_log()
1793        return s
1794
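    # Usage sketch (illustration only; not part of the original ASAP code).
    def _example_auto_quotient(self):
        # form (on-off)/off quotients, pairing each 'on' with the closest
        # 'off' in time and preserving the continuum
        return self.auto_quotient(preserve=True, mode='time')
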
1795    def mx_quotient(self, mask = None, weight='median', preserve=True):
1796        """
1797        Form a quotient using "off" beams when observing in "MX" mode.
1798        Parameters:
1799            mask:           an optional mask to be used when weight == 'stddev'
1800            weight:         How to average the off beams.  Default is 'median'.
1801            preserve:       you can preserve (default) the continuum or
1802                            remove it.  The equations used are
1803                            preserve: Output = Toff * (on/off) - Toff
1804                            remove:   Output = Toff * (on/off) - Ton
1805        """
1806        if mask is None: mask = ()
1807        varlist = vars()
1808        on = scantable(self._math._mx_extract(self, 'on'))
1809        preoff = scantable(self._math._mx_extract(self, 'off'))
1810        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1811        from asapmath  import quotient
1812        q = quotient(on, off, preserve)
1813        q._add_history("mx_quotient", varlist)
1814        print_log()
1815        return q
1816
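    # Usage sketch (illustration only; not part of the original ASAP code).
    def _example_mx_quotient(self):
        # form the MX-mode quotient, median-averaging the off beams
        return self.mx_quotient(weight='median', preserve=True)
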
1817    def freq_switch(self, insitu=None):
1818        """
1819        Apply frequency switching to the data.
1820        Parameters:
1821            insitu:      if False a new scantable is returned.
1822                         Otherwise, the switching is done in-situ
1823                         The default is taken from .asaprc (False)
1824        Example:
1825            none
1826        """
1827        if insitu is None: insitu = rcParams['insitu']
1828        self._math._setinsitu(insitu)
1829        varlist = vars()
1830        s = scantable(self._math._freqswitch(self))
1831        s._add_history("freq_switch", varlist)
1832        print_log()
1833        if insitu: self._assign(s)
1834        else: return s
1835
1836    def recalc_azel(self):
1837        """
1838        Recalculate the azimuth and elevation for each position.
1839        Parameters:
1840            none
1841        Example:
1842        """
1843        varlist = vars()
1844        self._recalcazel()
1845        self._add_history("recalc_azel", varlist)
1846        print_log()
1847        return
1848
1849    def __add__(self, other):
1850        varlist = vars()
1851        s = None
1852        if isinstance(other, scantable):
1853            s = scantable(self._math._binaryop(self, other, "ADD"))
1854        elif isinstance(other, float):
1855            s = scantable(self._math._unaryop(self, other, "ADD", False))
1856        else:
1857            raise TypeError("Other input is not a scantable or float value")
1858        s._add_history("operator +", varlist)
1859        print_log()
1860        return s
1861
1862    def __sub__(self, other):
1863        """
1864        implicit on all axes and on Tsys
1865        """
1866        varlist = vars()
1867        s = None
1868        if isinstance(other, scantable):
1869            s = scantable(self._math._binaryop(self, other, "SUB"))
1870        elif isinstance(other, float):
1871            s = scantable(self._math._unaryop(self, other, "SUB", False))
1872        else:
1873            raise TypeError("Other input is not a scantable or float value")
1874        s._add_history("operator -", varlist)
1875        print_log()
1876        return s
1877
1878    def __mul__(self, other):
1879        """
1880        implicit on all axes and on Tsys
1881        """
1882        varlist = vars()
1883        s = None
1884        if isinstance(other, scantable):
1885            s = scantable(self._math._binaryop(self, other, "MUL"))
1886        elif isinstance(other, float):
1887            s = scantable(self._math._unaryop(self, other, "MUL", False))
1888        else:
1889            raise TypeError("Other input is not a scantable or float value")
1890        s._add_history("operator *", varlist)
1891        print_log()
1892        return s
1893
1894
1895    def __div__(self, other):
1896        """
1897        implicit on all axes and on Tsys
1898        """
1899        varlist = vars()
1900        s = None
1901        if isinstance(other, scantable):
1902            s = scantable(self._math._binaryop(self, other, "DIV"))
1903        elif isinstance(other, float):
1904            if other == 0.0:
1905                raise ZeroDivisionError("Dividing by zero is not recommended")
1906            s = scantable(self._math._unaryop(self, other, "DIV", False))
1907        else:
1908            raise TypeError("Other input is not a scantable or float value")
1909        s._add_history("operator /", varlist)
1910        print_log()
1911        return s
1912
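    # Usage sketch (illustration only; 'other' is assumed to be a second
    # scantable of matching shape; the helper below is not part of the
    # original ASAP interface).
    def _example_arithmetic(self, other):
        # subtract a reference scantable and scale the difference by 2.0,
        # exercising __sub__ (scantable operand) and __mul__ (float operand)
        diff = self - other
        return diff * 2.0
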
1913    def get_fit(self, row=0):
1914        """
1915        Print or return the stored fits for a row in the scantable
1916        Parameters:
1917            row:    the row which the fit has been applied to.
1918        """
1919        if row >= self.nrow():
1920            return
1921        from asap.asapfit import asapfit
1922        fit = asapfit(self._getfit(row))
1923        if rcParams['verbose']:
1924            print fit
1925            return
1926        else:
1927            return fit.as_dict()
1928
1929    def flag_nans(self):
1930        """
1931        Utility function to flag NaN values in the scantable.
1932        """
1933        import numpy
1934        basesel = self.get_selection()
1935        for i in range(self.nrow()):
1936            sel = selector()+basesel
1937            sel.set_scans(self.getscan(i))
1938            sel.set_beams(self.getbeam(i))
1939            sel.set_ifs(self.getif(i))
1940            sel.set_polarisations(self.getpol(i))
1941            self.set_selection(sel)
1942            nans = numpy.isnan(self._getspectrum(0))
1943            if numpy.any(nans):
1944                bnans = [bool(v) for v in nans]
1945                self.flag(bnans)
1946        self.set_selection(basesel)
1947       
1948
1949    def _add_history(self, funcname, parameters):
1950        if not rcParams['scantable.history']:
1951            return
1952        # create date
1953        sep = "##"
1954        from datetime import datetime
1955        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1956        hist = dstr+sep
1957        hist += funcname+sep#cdate+sep
1958        if parameters.has_key('self'): del parameters['self']
1959        for k, v in parameters.iteritems():
1960            if type(v) is dict:
1961                for k2, v2 in v.iteritems():
1962                    hist += k2
1963                    hist += "="
1964                    if isinstance(v2, scantable):
1965                        hist += 'scantable'
1966                    elif k2 == 'mask':
1967                        if isinstance(v2, list) or isinstance(v2, tuple):
1968                            hist += str(self._zip_mask(v2))
1969                        else:
1970                            hist += str(v2)
1971                    else:
1972                        hist += str(v2)
1973            else:
1974                hist += k
1975                hist += "="
1976                if isinstance(v, scantable):
1977                    hist += 'scantable'
1978                elif k == 'mask':
1979                    if isinstance(v, list) or isinstance(v, tuple):
1980                        hist += str(self._zip_mask(v))
1981                    else:
1982                        hist += str(v)
1983                else:
1984                    hist += str(v)
1985            hist += sep
1986        hist = hist[:-2] # remove trailing '##'
1987        self._addhistory(hist)
1988
1989
1990    def _zip_mask(self, mask):
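        # collapse a boolean channel mask into a list of [start, end)
        # index segments covering the selected (True) channels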
1991        mask = list(mask)
1992        i = 0
1993        segments = []
1994        while mask[i:].count(1):
1995            i += mask[i:].index(1)
1996            if mask[i:].count(0):
1997                j = i + mask[i:].index(0)
1998            else:
1999                j = len(mask)
2000            segments.append([i, j])
2001            i = j
2002        return segments
2003
2004    def _get_ordinate_label(self):
2005        fu = "("+self.get_fluxunit()+")"
2006        import re
2007        lbl = "Intensity"
2008        if re.match(".K.", fu):
2009            lbl = "Brightness Temperature "+ fu
2010        elif re.match(".Jy.", fu):
2011            lbl = "Flux density "+ fu
2012        return lbl
2013
2014    def _check_ifs(self):
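        # crude consistency check: compares the mean number of channels per IF
        # (ignoring empty IFs) with that of the first IF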
2015        nchans = [self.nchan(i) for i in range(self.nif(-1))]
2016        nchans = filter(lambda t: t > 0, nchans)
2017        return (sum(nchans)/len(nchans) == nchans[0])
2018
2019    def _fill(self, names, unit, average, getpt):
2020        import os
2021        from asap._asap import stfiller
2022        first = True
2023        fullnames = []
2024        for name in names:
2025            name = os.path.expandvars(name)
2026            name = os.path.expanduser(name)
2027            if not os.path.exists(name):
2028                msg = "File '%s' does not exist" % (name)
2029                if rcParams['verbose']:
2030                    asaplog.push(msg)
2031                    print asaplog.pop().strip()
2032                    return
2033                raise IOError(msg)
2034            fullnames.append(name)
2035        if average:
2036            asaplog.push('Auto averaging integrations')
2037        stype = int(rcParams['scantable.storage'].lower() == 'disk')
2038        for name in fullnames:
2039            tbl = Scantable(stype)
2040            r = stfiller(tbl)
2041            rx = rcParams['scantable.reference']
2042            r._setreferenceexpr(rx)
2043            msg = "Importing %s..." % (name)
2044            asaplog.push(msg, False)
2045            print_log()
2046            r._open(name, -1, -1, getpt)
2047            r._read()
2048            if average:
2049                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
2050            if not first:
2051                tbl = self._math._merge([self, tbl])
2052            Scantable.__init__(self, tbl)
2053            r._close()
2054            del r, tbl
2055            first = False
2056        if unit is not None:
2057            self.set_fluxunit(unit)
2058        #self.set_freqframe(rcParams['scantable.freqframe'])
2059
2060    def __getitem__(self, key):
2061        if key < 0:
2062            key += self.nrow()
2063        if key >= self.nrow():
2064            raise IndexError("Row index out of range.")
2065        return self._getspectrum(key)
2066
2067    def __setitem__(self, key, value):
2068        if key < 0:
2069            key += self.nrow()
2070        if key >= self.nrow():
2071            raise IndexError("Row index out of range.")
2072        if not hasattr(value, "__len__") or \
2073                len(value) > self.nchan(self.getif(key)):
2074            raise ValueError("Spectrum length doesn't match.")
2075        return self._setspectrum(value, key)
2076
2077    def __len__(self):
2078        return self.nrow()
2079
2080    def __iter__(self):
2081        for i in range(len(self)):
2082            yield self[i]