source: trunk/python/scantable.py @ 1586

Last change on this file since 1586 was 1586, checked in by Malte Marquarding, 15 years ago

Ticket #165: removed the hard-coding of parallactifying the data. NOTE: this breaks the Table structure, as the PARANGLE column has been moved from the main table into the FOCUS table, so a new release is needed. Parallactifying now has to be enabled explicitly on the scantable, via rc or a member function.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 71.3 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import linecatalog
7from asap import _n_bools, mask_not, mask_and, mask_or
8
9class scantable(Scantable):
10    """
11        The ASAP container for scans
12    """
13
14    def __init__(self, filename, average=None, unit=None, parallactify=None):
15        """
16        Create a scantable from a saved one or make a reference
17        Parameters:
18            filename:    the name of an asap table on disk
19                         or
20                         the name of a rpfits/sdfits/ms file
21                         (integrations within scans are auto averaged
22                         and the whole file is read)
23                         or
24                         [advanced] a reference to an existing
25                         scantable
26            average:     average all integrations within a scan on read.
27                         The default (True) is taken from .asaprc.
28            unit:         brightness unit; must be consistent with K or Jy.
29                         Over-rides the default selected by the reader
30                         (input rpfits/sdfits/ms) or replaces the value
31                         in existing scantables
32            parallactify: Indicate that the data has been parallactified.
33                          Default is taken from the rc file.
34        """
35        if average is None:
36            average = rcParams['scantable.autoaverage']
37        if parallactify is None:
38            parallactify = rcParams['scantable.parallactify']
39        varlist = vars()
40        from asap._asap import stmath
41        self._math = stmath()
42        if isinstance(filename, Scantable):
43            Scantable.__init__(self, filename)
44        else:
45            if isinstance(filename, str):# or \
46#                (isinstance(filename, list) or isinstance(filename, tuple)) \
47#                  and isinstance(filename[-1], str):
48                import os.path
49                filename = os.path.expandvars(filename)
50                filename = os.path.expanduser(filename)
51                if not os.path.exists(filename):
52                    s = "File '%s' not found." % (filename)
53                    if rcParams['verbose']:
54                        asaplog.push(s)
55                        print asaplog.pop().strip()
56                        return
57                    raise IOError(s)
58                if os.path.isdir(filename) \
59                    and not os.path.exists(filename+'/table.f1'):
60                    # crude check if asap table
61                    if os.path.exists(filename+'/table.info'):
62                        ondisk = rcParams['scantable.storage'] == 'disk'
63                        Scantable.__init__(self, filename, ondisk)
64                        if unit is not None:
65                            self.set_fluxunit(unit)
66                        self.set_freqframe(rcParams['scantable.freqframe'])
67                    else:
68                        msg = "The given file '%s' is not a valid " \
69                              "asap table." % (filename)
70                        if rcParams['verbose']:
71                            print msg
72                            return
73                        else:
74                            raise IOError(msg)
75                else:
76                    self._fill([filename], unit, average)
77            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
78                  and isinstance(filename[-1], str):
79                self._fill(filename, unit, average)
80        self.parallactify(parallactify)
81        self._add_history("scantable", varlist)
82        print_log()
83
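    # Usage sketch for the constructor; the file name 'myobs.rpf' below is
    # hypothetical, but any rpfits/sdfits/ms file or saved asap table works
    # the same way:
    #
    #     from asap import scantable
    #     scan = scantable('myobs.rpf', average=True, unit='K')
    #     ref = scantable(scan)    # [advanced] reference to an existing scantable
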
84    def save(self, name=None, format=None, overwrite=False):
85        """
86        Store the scantable on disk. This can be an asap (aips++) Table,
87        SDFITS or MS2 format.
88        Parameters:
89            name:        the name of the outputfile. For format "ASCII"
90                         this is the root file name (data in 'name'.txt
91                         and header in 'name'_header.txt)
92            format:      an optional file format. Default is ASAP.
93                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
94                                       'SDFITS' (save as SDFITS file)
95                                       'ASCII' (saves as ascii text file)
96                                       'MS2' (saves as an aips++
97                                              MeasurementSet V2)
98                                       'FITS' (save as image FITS - not
99                                               readable by class)
100                                       'CLASS' (save as FITS readable by CLASS)
101            overwrite:   If the file should be overwritten if it exists.
102                         The default (False) is to return with a warning
103                         without writing the output. USE WITH CARE.
104        Example:
105            scan.save('myscan.asap')
106            scan.save('myscan.sdfits', 'SDFITS')
107        """
108        from os import path
109        if format is None: format = rcParams['scantable.save']
110        suffix = '.'+format.lower()
111        if name is None or name == "":
112            name = 'scantable'+suffix
113            msg = "No filename given. Using default name %s..." % name
114            asaplog.push(msg)
115        name = path.expandvars(name)
116        if path.isfile(name) or path.isdir(name):
117            if not overwrite:
118                msg = "File %s exists." % name
119                if rcParams['verbose']:
120                    print msg
121                    return
122                else:
123                    raise IOError(msg)
124        format2 = format.upper()
125        if format2 == 'ASAP':
126            self._save(name)
127        else:
128            from asap._asap import stwriter as stw
129            writer = stw(format2)
130            writer.write(self, name)
131        print_log()
132        return
133
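    # Usage sketch for save(); the output names are hypothetical:
    #
    #     scan.save('myscan.asap')                              # ASAP table (default)
    #     scan.save('myscan.sdfits', 'SDFITS', overwrite=True)
    #     scan.save('myscan.ms', 'MS2')
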
134    def copy(self):
135        """
136        Return a copy of this scantable.
137        Note:
138            This makes a full (deep) copy. scan2 = scan1 makes a reference.
139        Parameters:
140            none
141        Example:
142            copiedscan = scan.copy()
143        """
144        sd = scantable(Scantable._copy(self))
145        return sd
146
147    def drop_scan(self, scanid=None):
148        """
149        Return a new scantable where the specified scan number(s) have
150        been dropped.
151        Parameters:
152            scanid:    a (list of) scan number(s)
153        """
154        from asap import _is_sequence_or_number as _is_valid
155        from asap import _to_list
156        from asap import unique
157        if not _is_valid(scanid):
158            if rcParams['verbose']:
159                print "Please specify a scanno to drop from the scantable"
160                return
161            else:
162                raise RuntimeError("No scan given")
163        try:
164            scanid = _to_list(scanid)
165            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
166            for sid in scanid: allscans.remove(sid)
167            if len(allscans) == 0:
168                raise ValueError("Can't remove all scans")
169        except ValueError:
170            if rcParams['verbose']:
171                print "Couldn't find any match."
172                return
173            else: raise
174        try:
175            bsel = self.get_selection()
176            sel = selector()
177            sel.set_scans(allscans)
178            self.set_selection(bsel+sel)
179            scopy = self._copy()
180            self.set_selection(bsel)
181            return scantable(scopy)
182        except RuntimeError:
183            if rcParams['verbose']:
184                print "Couldn't find any match."
185            else:
186                raise
187
188
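    # Usage sketch for drop_scan(); the scan numbers are hypothetical:
    #
    #     trimmed = scan.drop_scan(0)          # drop scan 0
    #     trimmed = scan.drop_scan([1, 2, 5])  # drop several scans at once
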
189    def get_scan(self, scanid=None):
190        """
191        Return a specific scan (by scanno) or collection of scans (by
192        source name) in a new scantable.
193        Note:
194            See scantable.drop_scan() for the inverse operation.
195        Parameters:
196            scanid:    a (list of) scanno or a source name, unix-style
197                       patterns are accepted for source name matching, e.g.
198                       '*_R' gets all 'ref' scans
199        Example:
200            # get all scans containing the source '323p459'
201            newscan = scan.get_scan('323p459')
202            # get all 'off' scans
203            refscans = scan.get_scan('*_R')
204            # get a subset of scans by scanno (as listed in scan.summary())
205            newscan = scan.get_scan([0, 2, 7, 10])
206        """
207        if scanid is None:
208            if rcParams['verbose']:
209                print "Please specify a scan no or name to " \
210                      "retrieve from the scantable"
211                return
212            else:
213                raise RuntimeError("No scan given")
214
215        try:
216            bsel = self.get_selection()
217            sel = selector()
218            if type(scanid) is str:
219                sel.set_name(scanid)
220                self.set_selection(bsel+sel)
221                scopy = self._copy()
222                self.set_selection(bsel)
223                return scantable(scopy)
224            elif type(scanid) is int:
225                sel.set_scans([scanid])
226                self.set_selection(bsel+sel)
227                scopy = self._copy()
228                self.set_selection(bsel)
229                return scantable(scopy)
230            elif type(scanid) is list:
231                sel.set_scans(scanid)
232                self.set_selection(sel)
233                scopy = self._copy()
234                self.set_selection(bsel)
235                return scantable(scopy)
236            else:
237                msg = "Illegal scanid type, use 'int' or a 'list' of ints."
238                if rcParams['verbose']:
239                    print msg
240                else:
241                    raise TypeError(msg)
242        except RuntimeError:
243            if rcParams['verbose']: print "Couldn't find any match."
244            else: raise
245
246    def __str__(self):
247        return Scantable._summary(self, True)
248
249    def summary(self, filename=None):
250        """
251        Print a summary of the contents of this scantable.
252        Parameters:
253            filename:    the name of a file to write the output to
254                         Default - no file output
255            verbose:     print extra info such as the frequency table
256                         The default (False) is taken from .asaprc
257        """
258        info = Scantable._summary(self, True)
259        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
260        if filename is not None:
261            if filename == "":
262                filename = 'scantable_summary.txt'
263            from os.path import expandvars, isdir
264            filename = expandvars(filename)
265            if not isdir(filename):
266                data = open(filename, 'w')
267                data.write(info)
268                data.close()
269            else:
270                msg = "Illegal file name '%s'." % (filename)
271                if rcParams['verbose']:
272                    print msg
273                else:
274                    raise IOError(msg)
275        if rcParams['verbose']:
276            try:
277                from IPython.genutils import page as pager
278            except ImportError:
279                from pydoc import pager
280            pager(info)
281        else:
282            return info
283
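    # Usage sketch for summary(); the output file name is hypothetical:
    #
    #     scan.summary()                     # page the summary (verbose mode)
    #     scan.summary('myobs_summary.txt')  # or write it to a text file
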
284    def get_spectrum(self, rowno):
285        """Return the spectrum for the given row in the scantable as a list.
286        Parameters:
287             rowno:   the row number to retrieve the spectrum from
288        """
289        return self._getspectrum(rowno)
290
291    def get_mask(self, rowno):
292        """Return the mask for the given row in the scantable as a list.
293        Parameters:
294             rowno:   the row number to retrieve the mask from
295        """
296        return self._getmask(rowno)
297
298    def set_spectrum(self, spec, rowno):
299        """Set the spectrum for the given row in the scantable.
300        Parameters:
301             spec:   the spectrum
302             rowno:    the row number to set the spectrum for
303        """
304        assert(len(spec) == self.nchan())
305        return self._setspectrum(spec, rowno)
306
307    def get_selection(self):
308        """
309        Get the selection object currently set on this scantable.
310        Parameters:
311            none
312        Example:
313            sel = scan.get_selection()
314            sel.set_ifs(0)              # select IF 0
315            scan.set_selection(sel)     # apply modified selection
316        """
317        return selector(self._getselection())
318
319    def set_selection(self, selection=None, **kw):
320        """
321        Select a subset of the data. All following operations on this scantable
322        are only applied to this selection.
323        Parameters:
324            selection:    a selector object (default unset the selection)
325        Examples:
326            sel = selector()         # create a selection object
327            sel.set_scans([0, 3])     # select SCANNO 0 and 3
328            scan.set_selection(sel)  # set the selection
329            scan.summary()           # will only print summary of scanno 0 and 3
330            scan.set_selection()     # unset the selection
331        """
332        if selection is None:
333            # reset
334            if len(kw) == 0:
335                selection = selector()
336            else:
337                # try keywords
338                for k in kw:
339                    if k not in selector.fields:
340                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
341                selection = selector(**kw)
342        self._setselection(selection)
343
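    # Selection workflow sketch; the scan and IF numbers are hypothetical:
    #
    #     sel = selector()
    #     sel.set_scans([0, 3])
    #     sel.set_ifs([1])
    #     scan.set_selection(sel)   # operate on scans 0 and 3, IF 1 only
    #     # ... process the selected data ...
    #     scan.set_selection()      # unset the selection again
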
344    def stats(self, stat='stddev', mask=None):
345        """
346        Determine the specified statistic of the current beam/if/pol
347        Takes a 'mask' as an optional parameter to specify which
348        channels should be excluded.
349        Parameters:
350            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
351                     'var', 'stddev', 'avdev', 'rms', 'median'
352            mask:    an optional mask specifying where the statistic
353                     should be determined.
354        Example:
355            scan.set_unit('channel')
356            msk = scan.create_mask([100, 200], [500, 600])
357            scan.stats(stat='mean', mask=msk)
358        """
359        if mask is None:
360            mask = []
361        axes = ['Beam', 'IF', 'Pol', 'Time']
362        if not self._check_ifs():
363            raise ValueError("Cannot apply mask as the IFs have different "
364                             "number of channels. Please use set_selection() "
365                             "to select individual IFs")
366
367        statvals = self._math._stats(self, mask, stat)
368        out = ''
369        axes = []
370        for i in range(self.nrow()):
371            axis = []
372            axis.append(self.getscan(i))
373            axis.append(self.getbeam(i))
374            axis.append(self.getif(i))
375            axis.append(self.getpol(i))
376            axis.append(self.getcycle(i))
377            axes.append(axis)
378            tm = self._gettime(i)
379            src = self._getsourcename(i)
380            out += 'Scan[%d] (%s) ' % (axis[0], src)
381            out += 'Time[%s]:\n' % (tm)
382            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
383            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
384            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
385            out += '= %3.3f\n' % (statvals[i])
386            out +=  "--------------------------------------------------\n"
387
388        if rcParams['verbose']:
389            print "--------------------------------------------------"
390            print " ", stat
391            print "--------------------------------------------------"
392            print out
393        #else:
394            #retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
395            #           'axes' : axes,
396            #           'data': statvals}
397        return statvals
398
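    # Usage sketch for stats(); the channel range is hypothetical:
    #
    #     msk = scan.create_mask([100, 400])             # include channels 100-400
    #     rms_per_row = scan.stats(stat='rms', mask=msk)
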
399    def stddev(self, mask=None):
400        """
401        Determine the standard deviation of the current beam/if/pol
402        Takes a 'mask' as an optional parameter to specify which
403        channels should be excluded.
404        Parameters:
405            mask:    an optional mask specifying where the standard
406                     deviation should be determined.
407
408        Example:
409            scan.set_unit('channel')
410            msk = scan.create_mask([100, 200], [500, 600])
411            scan.stddev(mask=msk)
412        """
413        return self.stats(stat='stddev', mask=mask)
414
415
416    def get_column_names(self):
417        """
418        Return a list of column names, which can be used for selection.
419        """
420        return list(Scantable.get_column_names(self))
421
422    def get_tsys(self):
423        """
424        Return the System temperatures.
425        Returns:
426            a list of Tsys values for the current selection
427        """
428
429        return self._row_callback(self._gettsys, "Tsys")
430
431    def _row_callback(self, callback, label):
432        axes = []
433        axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
434        out = ""
435        outvec = []
436        for i in range(self.nrow()):
437            axis = []
438            axis.append(self.getscan(i))
439            axis.append(self.getbeam(i))
440            axis.append(self.getif(i))
441            axis.append(self.getpol(i))
442            axis.append(self.getcycle(i))
443            axes.append(axis)
444            tm = self._gettime(i)
445            src = self._getsourcename(i)
446            out += 'Scan[%d] (%s) ' % (axis[0], src)
447            out += 'Time[%s]:\n' % (tm)
448            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
449            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
450            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
451            outvec.append(callback(i))
452            out += '= %3.3f\n' % (outvec[i])
453            out +=  "--------------------------------------------------\n"
454        if rcParams['verbose']:
455            print "--------------------------------------------------"
456            print " %s" % (label)
457            print "--------------------------------------------------"
458            print out
459        # disabled because the vector seems more useful
460        #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
461        return outvec
462
463    def _get_column(self, callback, row=-1):
464        """Return 'callback(row)' for a single row, or a list of the values
465        for all rows when row is -1."""
466        if row == -1:
467            return [callback(i) for i in range(self.nrow())]
468        else:
469            if  0 <= row < self.nrow():
470                return callback(row)
471
472
473    def get_time(self, row=-1, asdatetime=False):
474        """
475        Get a list of time stamps for the observations.
476        Return a string (or a datetime object if asdatetime=True) for each integration time stamp.
477        Parameters:
478            row:          row number of integration. Default -1 returns all rows.
479            asdatetime:   return values as datetime objects rather than strings
480        Example:
481            none
482        """
483        from time import strptime
484        from datetime import datetime
485        times = self._get_column(self._gettime, row)
486        if not asdatetime:
487            return times
488        format = "%Y/%m/%d/%H:%M:%S"
489        if isinstance(times, list):
490            return [datetime(*strptime(i, format)[:6]) for i in times]
491        else:
492            return datetime(*strptime(times, format)[:6])
493
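    # Usage sketch for get_time():
    #
    #     stamps = scan.get_time()              # list of time-stamp strings
    #     dts = scan.get_time(asdatetime=True)  # list of datetime objects
    #     first = scan.get_time(row=0)          # time stamp of row 0 only
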
494
495    def get_inttime(self, row=-1):
496        """
497        Get a list of integration times for the observations.
498        Return a time in seconds for each integration in the scantable.
499        Parameters:
500            row:    row number of integration. Default -1 returns all rows.
501        Example:
502            none
503        """
504        return self._get_column(self._getinttime, row)
505
506
507    def get_sourcename(self, row=-1):
508        """
509        Get a list of source names for the observations.
510        Return a string for each integration in the scantable.
511        Parameters:
512            row:    row number of integration. Default -1 returns all rows.
513        Example:
514            none
515        """
516        return self._get_column(self._getsourcename, row)
517
518    def get_elevation(self, row=-1):
519        """
520        Get a list of elevations for the observations.
521        Return a float for each integration in the scantable.
522        Parameters:
523            row:    row number of integration. Default -1 returns all rows.
524        Example:
525            none
526        """
527        return self._get_column(self._getelevation, row)
528
529    def get_azimuth(self, row=-1):
530        """
531        Get a list of azimuths for the observations.
532        Return a float for each integration in the scantable.
533        Parameters:
534            row:    row number of integration. Default -1 returns all rows.
535        Example:
536            none
537        """
538        return self._get_column(self._getazimuth, row)
539
540    def get_parangle(self, row=-1):
541        """
542        Get a list of parallactic angles for the observations.
543        Return a float for each integration in the scantable.
544        Parameters:
545            row:    row number of integration. Default -1 returns all rows.
546        Example:
547            none
548        """
549        return self._get_column(self._getparangle, row)
550
551    def get_direction(self, row=-1):
552        """
553        Get a list of positions on the sky (direction) for the observations.
554        Return the direction for each integration in the scantable.
555        Parameters:
556            row:    row number of integration. Default -1 returns all rows.
557        Example:
558            none
559        """
560        return self._get_column(self._getdirection, row)
561
562    def get_directionval(self, row=-1):
563        """
564        Get a list of positions on the sky (direction) for the observations.
565        Return the direction values for each integration in the scantable.
566        Parameters:
567            row:    row number of integration. Default -1 returns all rows.
568        Example:
569            none
570        """
571        return self._get_column(self._getdirectionvec, row)
572
573
574    def set_unit(self, unit='channel'):
575        """
576        Set the unit for all following operations on this scantable
577        Parameters:
578            unit:    optional unit, default is 'channel'
579                     one of '*Hz', 'km/s', 'channel', ''
580        """
581        varlist = vars()
582        if unit in ['', 'pixel', 'channel']:
583            unit = ''
584        inf = list(self._getcoordinfo())
585        inf[0] = unit
586        self._setcoordinfo(inf)
587        self._add_history("set_unit", varlist)
588
589    def set_instrument(self, instr):
590        """
591        Set the instrument for subsequent processing.
592        Parameters:
593            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
594                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
595        """
596        self._setInstrument(instr)
597        self._add_history("set_instrument", vars())
598        print_log()
599
600    def set_feedtype(self, feedtype):
601        """
602        Overwrite the feed type, which might not be set correctly.
603        Parameters:
604            feedtype:     'linear' or 'circular'
605        """
606        self._setfeedtype(feedtype)
607        self._add_history("set_feedtype", vars())
608        print_log()
609
610    def set_doppler(self, doppler='RADIO'):
611        """
612        Set the doppler for all following operations on this scantable.
613        Parameters:
614            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
615        """
616        varlist = vars()
617        inf = list(self._getcoordinfo())
618        inf[2] = doppler
619        self._setcoordinfo(inf)
620        self._add_history("set_doppler", vars())
621        print_log()
622
623    def set_freqframe(self, frame=None):
624        """
625        Set the frame type of the Spectral Axis.
626        Parameters:
627            frame:   an optional frame type, default 'LSRK'. Valid frames are:
628                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
629                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
630        Examples:
631            scan.set_freqframe('BARY')
632        """
633        if frame is None: frame = rcParams['scantable.freqframe']
634        varlist = vars()
635        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
636                   'GEO', 'GALACTO', 'LGROUP', 'CMB']
637
638        if frame in valid:
639            inf = list(self._getcoordinfo())
640            inf[1] = frame
641            self._setcoordinfo(inf)
642            self._add_history("set_freqframe", varlist)
643        else:
644            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
645            if rcParams['verbose']:
646                print msg
647            else:
648                raise TypeError(msg)
649        print_log()
650
651    def set_dirframe(self, frame=""):
652        """
653        Set the frame type of the Direction on the sky.
654        Parameters:
655            frame:   an optional frame type, default ''. Valid frames are:
656                     'J2000', 'B1950', 'GALACTIC'
657        Examples:
658            scan.set_dirframe('GALACTIC')
659        """
660        varlist = vars()
661        try:
662            Scantable.set_dirframe(self, frame)
663        except RuntimeError, msg:
664            if rcParams['verbose']:
665                print msg
666            else:
667                raise
668        self._add_history("set_dirframe", varlist)
669
670    def get_unit(self):
671        """
672        Get the default unit set in this scantable
673        Returns:
674            A unit string
675        """
676        inf = self._getcoordinfo()
677        unit = inf[0]
678        if unit == '': unit = 'channel'
679        return unit
680
681    def get_abcissa(self, rowno=0):
682        """
683        Get the abcissa in the current coordinate setup for the currently
684        selected Beam/IF/Pol
685        Parameters:
686            rowno:    an optional row number in the scantable. Default is the
687                      first row, i.e. rowno=0
688        Returns:
689            The abcissa values and the format string (as a dictionary)
690        """
691        abc = self._getabcissa(rowno)
692        lbl = self._getabcissalabel(rowno)
693        print_log()
694        return abc, lbl
695
696    def flag(self, mask=None):
697        """
698        Flag the selected data using an optional channel mask.
699        Parameters:
700            mask:   an optional channel mask, created with create_mask. Default
701                    (no mask) is all channels.
702        """
703        varlist = vars()
704        if mask is None:
705            mask = []
706        try:
707            self._flag(mask)
708        except RuntimeError, msg:
709            if rcParams['verbose']:
710                print msg
711                return
712            else: raise
713        self._add_history("flag", varlist)
714
715    def lag_flag(self, start, end, unit="MHz", insitu=None):
716        """
717        Flag the data in 'lag' space by providing a frequency to remove.
718        Flagged data in the scantable gets interpolated over the region.
719        No taper is applied.
720        Parameters:
721            start:    the start frequency (really a period within the
722                      bandwidth)  or period to remove
723            end:      the end frequency or period to remove
724            unit:     the frequency unit (default "MHz") or "" for
725                      explicit lag channels
726        Notes:
727            It is recommended to flag edges of the band or strong
728            signals beforehand.
729        """
730        if insitu is None: insitu = rcParams['insitu']
731        self._math._setinsitu(insitu)
732        varlist = vars()
733        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
734        if not (unit == "" or base.has_key(unit)):
735            raise ValueError("%s is not a valid unit." % unit)
736        try:
737            if unit == "":
738                s = scantable(self._math._lag_flag(self, start, end, "lags"))
739            else:
740                s = scantable(self._math._lag_flag(self, start*base[unit],
741                                                   end*base[unit], "frequency"))
742        except RuntimeError, msg:
743            if rcParams['verbose']:
744                print msg
745                return
746            else: raise
747        s._add_history("lag_flag", varlist)
748        print_log()
749        if insitu:
750            self._assign(s)
751        else:
752            return s
753
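    # Usage sketch for lag_flag(); the values are hypothetical and describe a
    # standing-wave ripple whose period corresponds to 2.5-3.0 MHz:
    #
    #     cleaned = scan.lag_flag(2.5, 3.0, unit="MHz", insitu=False)
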
754
755    def create_mask(self, *args, **kwargs):
756        """
757        Compute and return a mask based on [min, max] windows.
758        The specified windows are to be INCLUDED, when the mask is
759        applied.
760        Parameters:
761            [min, max], [min2, max2], ...
762                Pairs of start/end points (inclusive) specifying the regions
763                to be masked
764            invert:     optional argument. If specified as True,
765                        return an inverted mask, i.e. the regions
766                        specified are EXCLUDED
767            row:        create the mask using the specified row for
768                        unit conversions, default is row=0
769                        only necessary if frequency varies over rows.
770        Example:
771            scan.set_unit('channel')
772            a)
773            msk = scan.create_mask([400, 500], [800, 900])
774            # masks everything outside 400 and 500
775            # and 800 and 900 in the unit 'channel'
776
777            b)
778            msk = scan.create_mask([400, 500], [800, 900], invert=True)
779            # masks the regions between 400 and 500
780            # and 800 and 900 in the unit 'channel'
781            c)
782            # mask only channel 400
783            msk = scan.create_mask([400])
784        """
785        row = kwargs.get("row", 0)
786        data = self._getabcissa(row)
787        u = self._getcoordinfo()[0]
788        if rcParams['verbose']:
789            if u == "": u = "channel"
790            msg = "The current mask window unit is %s" % u
791            i = self._check_ifs()
792            if not i:
793                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
794            asaplog.push(msg)
795        n = self.nchan()
796        msk = _n_bools(n, False)
797        # test if args is a 'list' or a 'normal *args - UGLY!!!
798
799        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
800             and args or args[0]
801        for window in ws:
802            if len(window) == 1:
803                window = [window[0], window[0]]
804            if len(window) == 0 or  len(window) > 2:
805                raise ValueError("A window needs to be defined as [start(, end)]")
806            if window[0] > window[1]:
807                tmp = window[0]
808                window[0] = window[1]
809                window[1] = tmp
810            for i in range(n):
811                if data[i] >= window[0] and data[i] <= window[1]:
812                    msk[i] = True
813        if kwargs.has_key('invert'):
814            if kwargs.get('invert'):
815                msk = mask_not(msk)
816        print_log()
817        return msk
818
819    def get_restfreqs(self):
820        """
821        Get the restfrequency(s) stored in this scantable.
822        The return value(s) are always of unit 'Hz'
823        Parameters:
824            none
825        Returns:
826            a list of doubles
827        """
828        return list(self._getrestfreqs())
829
830
831    def set_restfreqs(self, freqs=None, unit='Hz'):
832        """
833        Set or replace the rest frequency (or frequencies) of the selected data.
834        If the 'freqs' argument holds a scalar,
835        then that rest frequency will be applied to all the selected
836        data.  If the 'freqs' argument holds
837        a vector, then it MUST be of equal or smaller length than
838        the number of IFs (and the available restfrequencies will be
839        replaced by this vector).  In this case, *all* data have
840        the restfrequency set per IF according
841        to the corresponding value you give in the 'freqs' vector.
842        E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
843        IF 1 gets restfreq 2e9.
844        You can also specify the frequencies via a linecatalog.
845
846        Parameters:
847            freqs:   list of rest frequency values or string identifiers
848            unit:    unit for rest frequency (default 'Hz')
849
850        Example:
851            # set the given restfrequency for the whole table
852            scan.set_restfreqs(freqs=1.4e9)
853            # If the number of IFs in the data is >= 2, IF0 gets the first
854            # value IF1 the second...
855            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
856            #set the given restfrequency for the whole table (by name)
857            scan.set_restfreqs(freqs="OH1667")
858
859        Note:
860            To do more sophisticated rest-frequency setting, e.g. on a
861            source and IF basis, use scantable.set_selection() before using
862            this function.
863            # provided your scantable is called scan
864            selection = selector()
865            selection.set_name("ORION*")
866            selection.set_ifs([1])
867            scan.set_selection(selection)
868            scan.set_restfreqs(freqs=86.6e9)
869
870        """
871        varlist = vars()
872        from asap import linecatalog
873        # simple  value
874        if isinstance(freqs, int) or isinstance(freqs, float):
875            self._setrestfreqs(freqs, "",unit)
876        # list of values
877        elif isinstance(freqs, list) or isinstance(freqs, tuple):
878            # list values are scalars
879            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
880                sel = selector()
881                savesel = self._getselection()
882                iflist = self.getifnos()
883                for i in xrange(len(freqs)):
884                    sel.set_ifs(iflist[i])
885                    self._setselection(sel)
886                    self._setrestfreqs(freqs[i], "",unit)
887                self._setselection(savesel)
888            # list values are dicts with 'value' and 'name' keys
889            elif isinstance(freqs[-1], dict):
890                sel = selector()
891                savesel = self._getselection()
892                iflist = self.getifnos()
893                for i in xrange(len(freqs)):
894                    sel.set_ifs(iflist[i])
895                    self._setselection(sel)
896                    self._setrestfreqs(freqs[i]["value"],
897                                       freqs[i]["name"], "MHz")
898                self._setselection(savesel)
899        # freqs are to be taken from a linecatalog
900        elif isinstance(freqs, linecatalog):
901            sel = selector()
902            savesel = self._getselection()
903            iflist = self.getifnos()
904            for i in xrange(freqs.nrow()):
905                sel.set_ifs(iflist[i])
906                self._setselection(sel)
907                self._setrestfreqs(freqs.get_frequency(i),
908                                   freqs.get_name(i), "MHz")
909                # ensure that we are not iterating past nIF
910                if i == self.nif()-1: break
911            self._setselection(savesel)
912        else:
913            return
914        self._add_history("set_restfreqs", varlist)
915
916    def shift_refpix(self, delta):
917        """
918        Shift the reference pixel of the Spectral Coordinate by an
919        integer amount.
920        Parameters:
921            delta:   the amount to shift by
922        Note:
923            Be careful using this with broadband data.
924        """
925        Scantable.shift(self, delta)
926
927    def history(self, filename=None):
928        """
929        Print the history. Optionally to a file.
930        Parameters:
931            filename:    The name  of the file to save the history to.
932        """
933        hist = list(self._gethistory())
934        out = "-"*80
935        for h in hist:
936            if h.startswith("---"):
937                out += "\n"+h
938            else:
939                items = h.split("##")
940                date = items[0]
941                func = items[1]
942                items = items[2:]
943                out += "\n"+date+"\n"
944                out += "Function: %s\n  Parameters:" % (func)
945                for i in items:
946                    s = i.split("=")
947                    out += "\n   %s = %s" % (s[0], s[1])
948                out += "\n"+"-"*80
949        if filename is not None:
950            if filename == "":
951                filename = 'scantable_history.txt'
952            import os
953            filename = os.path.expandvars(os.path.expanduser(filename))
954            if not os.path.isdir(filename):
955                data = open(filename, 'w')
956                data.write(out)
957                data.close()
958            else:
959                msg = "Illegal file name '%s'." % (filename)
960                if rcParams['verbose']:
961                    print msg
962                else:
963                    raise IOError(msg)
964        if rcParams['verbose']:
965            try:
966                from IPython.genutils import page as pager
967            except ImportError:
968                from pydoc import pager
969            pager(out)
970        else:
971            return out
972        return
973    #
974    # Maths business
975    #
976
977    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
978        """
979        Return the (time) weighted average of a scan.
980        Note:
981            in channels only - align if necessary
982        Parameters:
983            mask:     an optional mask (only used for 'var' and 'tsys'
984                      weighting)
985            scanav:   True averages each scan separately
986                      False (default) averages all scans together,
987            weight:   Weighting scheme.
988                      'none'     (mean no weight)
989                      'var'      (1/var(spec) weighted)
990                      'tsys'     (1/Tsys**2 weighted)
991                      'tint'     (integration time weighted)
992                      'tintsys'  (Tint/Tsys**2)
993                      'median'   ( median averaging)
994                      The default is 'tint'
995            align:    align the spectra in velocity before averaging. It takes
996                      the time of the first spectrum as reference time.
997        Example:
998            # time average the scantable without using a mask
999            newscan = scan.average_time()
1000        """
1001        varlist = vars()
1002        if weight is None: weight = 'TINT'
1003        if mask is None: mask = ()
1004        if scanav: scanav = "SCAN"
1005        else: scanav = "NONE"
1006        scan = (self, )
1007        try:
1008            if align:
1009                scan = (self.freq_align(insitu=False), )
1010            s = None
1011            if weight.upper() == 'MEDIAN':
1012                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
1013                                                         scanav))
1014            else:
1015                s = scantable(self._math._average(scan, mask, weight.upper(),
1016                              scanav))
1017        except RuntimeError, msg:
1018            if rcParams['verbose']:
1019                print msg
1020                return
1021            else: raise
1022        s._add_history("average_time", varlist)
1023        print_log()
1024        return s
1025
1026    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
1027        """
1028        Return a scan where all spectra are converted to either
1029        Jansky or Kelvin depending upon the flux units of the scan table.
1030        By default the function tries to look the values up internally.
1031        If it can't find them (or if you want to over-ride), you must
1032        specify EITHER jyperk OR eta (and D which it will try to look up
1033        also if you don't set it). jyperk takes precedence if you set both.
1034        Parameters:
1035            jyperk:      the Jy / K conversion factor
1036            eta:         the aperture efficiency
1037            d:           the geometric diameter (metres)
1038            insitu:      if False a new scantable is returned.
1039                         Otherwise, the scaling is done in-situ
1040                         The default is taken from .asaprc (False)
1041        """
1042        if insitu is None: insitu = rcParams['insitu']
1043        self._math._setinsitu(insitu)
1044        varlist = vars()
1045        if jyperk is None: jyperk = -1.0
1046        if d is None: d = -1.0
1047        if eta is None: eta = -1.0
1048        s = scantable(self._math._convertflux(self, d, eta, jyperk))
1049        s._add_history("convert_flux", varlist)
1050        print_log()
1051        if insitu: self._assign(s)
1052        else: return s
1053
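
    # Usage sketch for convert_flux(); the Jy/K factor, efficiency and dish
    # diameter below are hypothetical:
    #
    #     jyscan = scan.convert_flux(jyperk=12.5, insitu=False)
    #     # or let the factor be derived from an efficiency and a diameter
    #     jyscan = scan.convert_flux(eta=0.7, d=64.0, insitu=False)
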
1054    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
1055        """
1056        Return a scan after applying a gain-elevation correction.
1057        The correction can be made via either a polynomial or a
1058        table-based interpolation (and extrapolation if necessary).
1059        You specify polynomial coefficients, an ascii table or neither.
1060        If you specify neither, then a polynomial correction will be made
1061        with built in coefficients known for certain telescopes (an error
1062        will occur if the instrument is not known).
1063        The data and Tsys are *divided* by the scaling factors.
1064        Parameters:
1065            poly:        Polynomial coefficients (default None) to compute a
1066                         gain-elevation correction as a function of
1067                         elevation (in degrees).
1068            filename:    The name of an ascii file holding correction factors.
1069                         The first row of the ascii file must give the column
1070                         names and these MUST include columns
1071                         "ELEVATION" (degrees) and "FACTOR" (multiply data
1072                         by this) somewhere.
1073                         The second row must give the data type of the
1074                         column. Use 'R' for Real and 'I' for Integer.
1075                         An example file would be
1076                         (actual factors are arbitrary) :
1077
1078                         TIME ELEVATION FACTOR
1079                         R R R
1080                         0.1 0 0.8
1081                         0.2 20 0.85
1082                         0.3 40 0.9
1083                         0.4 60 0.85
1084                         0.5 80 0.8
1085                         0.6 90 0.75
1086            method:      Interpolation method when correcting from a table.
1087                         Values are  "nearest", "linear" (default), "cubic"
1088                         and "spline"
1089            insitu:      if False a new scantable is returned.
1090                         Otherwise, the scaling is done in-situ
1091                         The default is taken from .asaprc (False)
1092        """
1093
1094        if insitu is None: insitu = rcParams['insitu']
1095        self._math._setinsitu(insitu)
1096        varlist = vars()
1097        if poly is None:
1098            poly = ()
1099        from os.path import expandvars
1100        filename = expandvars(filename)
1101        s = scantable(self._math._gainel(self, poly, filename, method))
1102        s._add_history("gain_el", varlist)
1103        print_log()
1104        if insitu: self._assign(s)
1105        else: return s
1106
1107    def freq_align(self, reftime=None, method='cubic', insitu=None):
1108        """
1109        Return a scan where all rows have been aligned in frequency/velocity.
1110        The alignment frequency frame (e.g. LSRK) is that set by function
1111        set_freqframe.
1112        Parameters:
1113            reftime:     reference time to align at. By default, the time of
1114                         the first row of data is used.
1115            method:      Interpolation method for regridding the spectra.
1116                         Choose from "nearest", "linear", "cubic" (default)
1117                         and "spline"
1118            insitu:      if False a new scantable is returned.
1119                         Otherwise, the scaling is done in-situ
1120                         The default is taken from .asaprc (False)
1121        """
1122        if insitu is None: insitu = rcParams["insitu"]
1123        self._math._setinsitu(insitu)
1124        varlist = vars()
1125        if reftime is None: reftime = ""
1126        s = scantable(self._math._freq_align(self, reftime, method))
1127        s._add_history("freq_align", varlist)
1128        print_log()
1129        if insitu: self._assign(s)
1130        else: return s
1131
1132    def opacity(self, tau, insitu=None):
1133        """
1134        Apply an opacity correction. The data
1135        and Tsys are multiplied by the correction factor.
1136        Parameters:
1137            tau:         Opacity from which the correction factor is
1138                         exp(tau*ZD)
1139                         where ZD is the zenith-distance
1140            insitu:      if False a new scantable is returned.
1141                         Otherwise, the scaling is done in-situ
1142                         The default is taken from .asaprc (False)
1143        """
1144        if insitu is None: insitu = rcParams['insitu']
1145        self._math._setinsitu(insitu)
1146        varlist = vars()
1147        s = scantable(self._math._opacity(self, tau))
1148        s._add_history("opacity", varlist)
1149        print_log()
1150        if insitu: self._assign(s)
1151        else: return s
1152
1153    def bin(self, width=5, insitu=None):
1154        """
1155        Return a scan where all spectra have been binned up.
1156        Parameters:
1157            width:       The bin width (default=5) in pixels
1158            insitu:      if False a new scantable is returned.
1159                         Otherwise, the scaling is done in-situ
1160                         The default is taken from .asaprc (False)
1161        """
1162        if insitu is None: insitu = rcParams['insitu']
1163        self._math._setinsitu(insitu)
1164        varlist = vars()
1165        s = scantable(self._math._bin(self, width))
1166        s._add_history("bin", varlist)
1167        print_log()
1168        if insitu: self._assign(s)
1169        else: return s
1170
1171
1172    def resample(self, width=5, method='cubic', insitu=None):
1173        """
1174        Return a scan where all spectra have been resampled onto a new channel grid.
1175
1176        Parameters:
1177            width:       The bin width (default=5) in pixels
1178            method:      Interpolation method used when resampling.
1179                         Values are  "nearest", "linear", "cubic" (default)
1180                         and "spline"
1181            insitu:      if False a new scantable is returned.
1182                         Otherwise, the scaling is done in-situ
1183                         The default is taken from .asaprc (False)
1184        """
1185        if insitu is None: insitu = rcParams['insitu']
1186        self._math._setinsitu(insitu)
1187        varlist = vars()
1188        s = scantable(self._math._resample(self, method, width))
1189        s._add_history("resample", varlist)
1190        print_log()
1191        if insitu: self._assign(s)
1192        else: return s
1193
1194
1195    def average_pol(self, mask=None, weight='none'):
1196        """
1197        Average the Polarisations together.
1198        Parameters:
1199            mask:        An optional mask defining the region, where the
1200                         averaging will be applied. The output will have all
1201                         specified points masked.
1202            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1203                         weighted), or 'tsys' (1/Tsys**2 weighted)
1204        """
1205        varlist = vars()
1206        if mask is None:
1207            mask = ()
1208        s = scantable(self._math._averagepol(self, mask, weight.upper()))
1209        s._add_history("average_pol", varlist)
1210        print_log()
1211        return s
1212
1213    def average_beam(self, mask=None, weight='none'):
1214        """
1215        Average the Beams together.
1216        Parameters:
1217            mask:        An optional mask defining the region, where the
1218                         averaging will be applied. The output will have all
1219                         specified points masked.
1220            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
1221                         weighted), or 'tsys' (1/Tsys**2 weighted)
1222        """
1223        varlist = vars()
1224        if mask is None:
1225            mask = ()
1226        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1227        s._add_history("average_beam", varlist)
1228        print_log()
1229        return s
1230
1231    def parallactify(self, pflag):
        """
        Mark the data as parallactified (cf. the 'scantable.parallactify'
        rc parameter and the constructor's 'parallactify' argument).
        Parameters:
            pflag:    True or False
        """
1232        varlist = vars()
1233        self._parallactify(pflag)
1234        self._add_history("parallactify", varlist)
1235
1236    def convert_pol(self, poltype=None):
1237        """
1238        Convert the data to a different polarisation type.
1239        Note that you will need cross-polarisation terms for most conversions.
1240        Parameters:
1241            poltype:    The new polarisation type. Valid types are:
1242                        "linear", "circular", "stokes" and "linpol"
1243        """
1244        varlist = vars()
1245        try:
1246            s = scantable(self._math._convertpol(self, poltype))
1247        except RuntimeError, msg:
1248            if rcParams['verbose']:
1249                print msg
1250                return
1251            else:
1252                raise
1253        s._add_history("convert_pol", varlist)
1254        print_log()
1255        return s
1256
1257    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
1258        """
1259        Smooth the spectrum by the specified kernel (conserving flux).
1260        Parameters:
1261            kernel:     The type of smoothing kernel. Select from
1262                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1263                        or 'poly'
1264            width:      The width of the kernel in pixels. For 'hanning' this is
1265                        ignored; otherwise it defaults to 5 pixels.
1266                        For 'gaussian' it is the Full Width Half
1267                        Maximum. For 'boxcar' it is the full width.
1268                        For 'rmedian' and 'poly' it is the half width.
1269            order:      Optional parameter for 'poly' kernel (default is 2), to
1270                        specify the order of the polynomial. Ignored by all other
1271                        kernels.
1272            insitu:     if False a new scantable is returned.
1273                        Otherwise, the scaling is done in-situ
1274                        The default is taken from .asaprc (False)
1275        Example:
1276             none
1277        """
1278        if insitu is None: insitu = rcParams['insitu']
1279        self._math._setinsitu(insitu)
1280        varlist = vars()
1281        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
1282        s._add_history("smooth", varlist)
1283        print_log()
1284        if insitu: self._assign(s)
1285        else: return s
1286
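    # Usage sketch for smooth(); the kernel choices and widths are hypothetical:
    #
    #     sm = scan.smooth(kernel='hanning', insitu=False)
    #     sm = scan.smooth(kernel='gaussian', width=10, insitu=False)  # FWHM in pixels
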
1287
1288    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
1289        """
1290        Return a scan which has been baselined (all rows) by a polynomial.
1291        Parameters:
1292            mask:       an optional mask
1293            order:      the order of the polynomial (default is 0)
1294            plot:       plot the fit and the residual. In this mode each
1295                        individual fit has to be approved by typing 'y'
1296                        or 'n'
1297            uselin:     use linear polynomial fit
1298            insitu:     if False a new scantable is returned.
1299                        Otherwise, the scaling is done in-situ
1300                        The default is taken from .asaprc (False)
1301        Example:
1302            # return a scan baselined by a third order polynomial,
1303            # not using a mask
1304            bscan = scan.poly_baseline(order=3)
1305        """
1306        if insitu is None: insitu = rcParams['insitu']
1307        varlist = vars()
1308        if mask is None:
1309            mask = [True for i in xrange(self.nchan(-1))]
1310        from asap.asapfitter import fitter
1311        try:
1312            f = fitter()
1313            f.set_scan(self, mask)
1314            #f.set_function(poly=order)
1315            if uselin:
1316                f.set_function(lpoly=order)
1317            else:
1318                f.set_function(poly=order)
1319            s = f.auto_fit(insitu, plot=plot)
1320            s._add_history("poly_baseline", varlist)
1321            print_log()
1322            if insitu: self._assign(s)
1323            else: return s
1324        except RuntimeError:
1325            msg = "The fit failed, possibly because it didn't converge."
1326            if rcParams['verbose']:
1327                print msg
1328                return
1329            else:
1330                raise RuntimeError(msg)
1331
1332
1333    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1334                           threshold=3, chan_avg_limit=1, plot=False,
1335                           insitu=None):
1336        """
1337        Return a scan which has been baselined (all rows) by a polynomial.
1338        Spectral lines are detected first using linefinder and masked out
1339        to avoid them affecting the baseline solution.
1340
1341        Parameters:
1342            mask:       an optional mask retrieved from the scantable
1343            edge:       an optional number of channels to drop at
1344                        the edge of the spectrum. If only one value is
1345                        specified, the same number will be dropped from
1346                        both sides of the spectrum. Default is to keep
1347                        all channels. Nested tuples represent individual
1348                        edge selection for different IFs (the number of
1349                        spectral channels can differ between IFs)
1350            order:      the order of the polynomial (default is 0)
1351            threshold:  the threshold used by line finder. It is better to
1352                        keep it large as only strong lines affect the
1353                        baseline solution.
1354            chan_avg_limit:
1355                        a maximum number of consecutive spectral channels to
1356                        average during the search of weak and broad lines.
1357                        The default is no averaging (and no search for weak
1358                        lines). If such lines can affect the fitted baseline
1359                        (e.g. a high order polynomial is fitted), increase this
1360                        parameter (usually values up to 8 are reasonable). Most
1361                        users of this method should find the default value
1362                        sufficient.
1363            plot:       plot the fit and the residual. In this mode each
1364                        individual fit has to be approved by typing 'y'
1365                        or 'n'
1366            insitu:     if False a new scantable is returned.
1367                        Otherwise, the fitting is done in-situ
1368                        The default is taken from .asaprc (False)
1369
1370        Example:
1371            scan2=scan.auto_poly_baseline(order=7)
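            # an illustrative call only (order, edge and threshold values are
            # examples): drop 30 channels at both edges of IF0 and 40 channels
            # at both edges of IF1
            scan3 = scan.auto_poly_baseline(order=3, edge=[(30, 30), (40, 40)],
                                            threshold=5)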
1372        """
1373        if insitu is None: insitu = rcParams['insitu']
1374        varlist = vars()
1375        from asap.asapfitter import fitter
1376        from asap.asaplinefind import linefinder
1377        from asap import _is_sequence_or_number as _is_valid
1378
1379        # check whether edge is set up for each IF individually
1380        individualedge = False
1381        if len(edge) > 1:
1382            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1383                individualedge = True
1384
1385        if not _is_valid(edge, int) and not individualedge:
1386            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1387                "pair of integers specified as a tuple. Nested tuples are " \
1388                "allowed to make individual selection for different IFs."
1389
1390        curedge = (0, 0)
1391        if individualedge:
1392            for edgepar in edge:
1393                if not _is_valid(edgepar, int):
1394                    raise ValueError, "Each element of the 'edge' tuple " \
1395                        "has to be a pair of integers or an integer."
1396        else:
1397            curedge = edge
1398
1399        # setup fitter
1400        f = fitter()
1401        f.set_function(poly=order)
1402
1403        # setup line finder
1404        fl = linefinder()
1405        fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
1406
1407        if not insitu:
1408            workscan = self.copy()
1409        else:
1410            workscan = self
1411
1412        fl.set_scan(workscan)
1413
1414        rows = range(workscan.nrow())
1415        asaplog.push("Processing:")
1416        for r in rows:
1417            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1418                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1419                 workscan.getpol(r), workscan.getcycle(r))
1420            asaplog.push(msg, False)
1421
1422            # figure out edge parameter
1423            if individualedge:
1424                if len(edge) <= workscan.getif(r):
1425                    raise RuntimeError, "Number of edge elements appears to " \
1426                                        "be less than the number of IFs"
1427                curedge = edge[workscan.getif(r)]
1428
1429            # setup line finder
1430            fl.find_lines(r, mask, curedge)
1431            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1432                        mask_and(workscan._getmask(r), fl.get_mask()))
1433            f.fit()
1434            x = f.get_parameters()
1435            if plot:
1436                f.plot(residual=True)
1437                x = raw_input("Accept fit ( [y]/n ): ")
1438                if x.upper() == 'N':
1439                    continue
1440            workscan._setspectrum(f.fitter.getresidual(), r)
1441        if plot:
1442            f._p.unmap()
1443            f._p = None
1444        workscan._add_history("auto_poly_baseline", varlist)
1445        if insitu:
1446            self._assign(workscan)
1447        else:
1448            return workscan
1449
1450    def rotate_linpolphase(self, angle):
1451        """
1452        Rotate the phase of the complex polarization O=Q+iU correlation.
1453        This is always done in situ in the raw data.  So if you call this
1454        function more than once then each call rotates the phase further.
1455        Parameters:
1456            angle:   The angle (degrees) to rotate (add) by.
1457        Examples:
1458            scan.rotate_linpolphase(2.3)
1459        """
1460        varlist = vars()
1461        self._math._rotate_linpolphase(self, angle)
1462        self._add_history("rotate_linpolphase", varlist)
1463        print_log()
1464        return
1465
1466
1467    def rotate_xyphase(self, angle):
1468        """
1469        Rotate the phase of the XY correlation.  This is always done in situ
1470        in the data.  So if you call this function more than once
1471        then each call rotates the phase further.
1472        Parameters:
1473            angle:   The angle (degrees) to rotate (add) by.
1474        Examples:
1475            scan.rotate_xyphase(2.3)
1476        """
1477        varlist = vars()
1478        self._math._rotate_xyphase(self, angle)
1479        self._add_history("rotate_xyphase", varlist)
1480        print_log()
1481        return
1482
1483    def swap_linears(self):
1484        """
1485        Swap the linear polarisations XX and YY, or more generally the
1486        first two polarisations, as this also works for circulars.
1487        """
1488        varlist = vars()
1489        self._math._swap_linears(self)
1490        self._add_history("swap_linears", varlist)
1491        print_log()
1492        return
1493
1494    def invert_phase(self):
1495        """
1496        Invert the phase of the complex polarisation
1497        """
1498        varlist = vars()
1499        self._math._invert_phase(self)
1500        self._add_history("invert_phase", varlist)
1501        print_log()
1502        return
1503
1504    def add(self, offset, insitu=None):
1505        """
1506        Return a scan where all spectra have the offset added
1507        Parameters:
1508            offset:      the offset
1509            insitu:      if False a new scantable is returned.
1510                         Otherwise, the addition is done in-situ
1511                         The default is taken from .asaprc (False)
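        Example:
            # illustrative only: add a constant offset of 0.5 to every spectrum
            sscan = scan.add(0.5, insitu=False)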
1512        """
1513        if insitu is None: insitu = rcParams['insitu']
1514        self._math._setinsitu(insitu)
1515        varlist = vars()
1516        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1517        s._add_history("add", varlist)
1518        print_log()
1519        if insitu:
1520            self._assign(s)
1521        else:
1522            return s
1523
1524    def scale(self, factor, tsys=True, insitu=None):
1525        """
1526        Return a scan where all spectra are scaled by the given 'factor'
1527        Parameters:
1528            factor:      the scaling factor
1529            insitu:      if False a new scantable is returned.
1530                         Otherwise, the scaling is done in-situ
1531                         The default is taken from .asaprc (False)
1532            tsys:        if True (default) then apply the operation to Tsys
1533                         as well as the data
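        Example:
            # illustrative only: scale all spectra (and Tsys) by a factor of 1.3
            sscan = scan.scale(1.3, tsys=True, insitu=False)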
1534        """
1535        if insitu is None: insitu = rcParams['insitu']
1536        self._math._setinsitu(insitu)
1537        varlist = vars()
1538        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1539        s._add_history("scale", varlist)
1540        print_log()
1541        if insitu:
1542            self._assign(s)
1543        else:
1544            return s
1545
1546    def set_sourcetype(self, match, matchtype="pattern",
1547                       sourcetype="reference"):
1548        """
1549        Set the type of the source to be a source or reference scan
1550        using the provided pattern.
1551        Parameters:
1552            match:          a Unix style pattern, regular expression or selector
1553            matchtype:      'pattern' (default) UNIX style pattern or
1554                            'regex' regular expression
1555            sourcetype:     the type of the source to use (source/reference)
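        Example:
            # illustrative only; assumes the off scans carry a trailing '_R'
            # in their source names (the naming convention is site-dependent)
            scan.set_sourcetype('*_R', matchtype='pattern',
                                sourcetype='reference')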
1556        """
1557        varlist = vars()
1558        basesel = self.get_selection()
1559        stype = -1
1560        if sourcetype.lower().startswith("r"):
1561            stype = 1
1562        elif sourcetype.lower().startswith("s"):
1563            stype = 0
1564        else:
1565            raise ValueError("Illegal sourcetype, use s(ource) or r(eference)")
1566        if matchtype.lower().startswith("p"):
1567            matchtype = "pattern"
1568        elif matchtype.lower().startswith("r"):
1569            matchtype = "regex"
1570        else:
1571            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1572        sel = selector()
1573        if isinstance(match, selector):
1574            sel = match
1575        else:
1576            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1577        self.set_selection(basesel+sel)
1578        self._setsourcetype(stype)
1579        self.set_selection(basesel)
1580        self._add_history("set_sourcetype", varlist)
1581
1582    def auto_quotient(self, preserve=True, mode='paired'):
1583        """
1584        This function allows you to build quotients automatically.
1585        It assumes the observation to have the same number of
1586        "ons" and "offs"
1587        Parameters:
1588            preserve:       you can preserve (default) the continuum or
1589                            remove it.  The equations used are
1590                            preserve: Output = Toff * (on/off) - Toff
1591                            remove:   Output = Toff * (on/off) - Ton
1592            mode:           the on/off detection mode
1593                            'paired' (default)
1594                            identifies 'off' scans by the
1595                            trailing '_R' (Mopra/Parkes) or
1596                            '_e'/'_w' (Tid) and matches
1597                            on/off pairs from the observing pattern
1598                            'time'
1599                            finds the closest off in time
1600
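        Example:
            # illustrative only; requires matching on and off scans in the data
            q = scan.auto_quotient(preserve=True, mode='paired')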
1601        """
1602        modes = ["time", "paired"]
1603        if not mode in modes:
1604            msg = "Please provide a valid mode. Valid modes are %s" % (modes)
1605            raise ValueError(msg)
1606        varlist = vars()
1607        s = None
1608        if mode.lower() == "paired":
1609            basesel = self.get_selection()
1610            sel = selector()+basesel
1611            sel.set_query("SRCTYPE==1")
1612            self.set_selection(sel)
1613            offs = self.copy()
1614            sel.set_query("SRCTYPE==0")
1615            self.set_selection(sel)
1616            ons = self.copy()
1617            s = scantable(self._math._quotient(ons, offs, preserve))
1618            self.set_selection(basesel)
1619        elif mode.lower() == "time":
1620            s = scantable(self._math._auto_quotient(self, mode, preserve))
1621        s._add_history("auto_quotient", varlist)
1622        print_log()
1623        return s
1624
1625    def mx_quotient(self, mask = None, weight='median', preserve=True):
1626        """
1627        Form a quotient using "off" beams when observing in "MX" mode.
1628        Parameters:
1629            mask:           an optional mask to be used when weight == 'stddev'
1630            weight:         How to average the off beams.  Default is 'median'.
1631            preserve:       you can preserve (default) the continuum or
1632                            remove it.  The equations used are
1633                            preserve: Output = Toff * (on/off) - Toff
1634                            remove:   Output = Toff * (on/off) - Ton
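        Example:
            # illustrative only; assumes the data were taken in MX mode
            q = scan.mx_quotient(weight='median', preserve=True)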
1635        """
1636        if mask is None: mask = ()
1637        varlist = vars()
1638        on = scantable(self._math._mx_extract(self, 'on'))
1639        preoff = scantable(self._math._mx_extract(self, 'off'))
1640        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1641        from asapmath import quotient
1642        q = quotient(on, off, preserve)
1643        q._add_history("mx_quotient", varlist)
1644        print_log()
1645        return q
1646
1647    def freq_switch(self, insitu=None):
1648        """
1649        Apply frequency switching to the data.
1650        Parameters:
1651            insitu:      if False a new scantable is returned.
1652                         Otherwise, the switching is done in-situ
1653                         The default is taken from .asaprc (False)
1654        Example:
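            # a minimal illustrative call
            fs = scan.freq_switch(insitu=False)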
1656        """
1657        if insitu is None: insitu = rcParams['insitu']
1658        self._math._setinsitu(insitu)
1659        varlist = vars()
1660        s = scantable(self._math._freqswitch(self))
1661        s._add_history("freq_switch", varlist)
1662        print_log()
1663        if insitu: self._assign(s)
1664        else: return s
1665
1666    def recalc_azel(self):
1667        """
1668        Recalculate the azimuth and elevation for each position.
1669        Parameters:
1670            none
1671        Example:
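            # takes no arguments; recomputes AZ/EL from the stored positions
            scan.recalc_azel()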
1672        """
1673        varlist = vars()
1674        self._recalcazel()
1675        self._add_history("recalc_azel", varlist)
1676        print_log()
1677        return
1678
1679    def __add__(self, other):
1680        varlist = vars()
1681        s = None
1682        if isinstance(other, scantable):
1683            s = scantable(self._math._binaryop(self, other, "ADD"))
1684        elif isinstance(other, float):
1685            s = scantable(self._math._unaryop(self, other, "ADD", False))
1686        else:
1687            raise TypeError("Other input is not a scantable or float value")
1688        s._add_history("operator +", varlist)
1689        print_log()
1690        return s
1691
1692    def __sub__(self, other):
1693        """
1694        implicit on all axes and on Tsys
1695        """
1696        varlist = vars()
1697        s = None
1698        if isinstance(other, scantable):
1699            s = scantable(self._math._binaryop(self, other, "SUB"))
1700        elif isinstance(other, float):
1701            s = scantable(self._math._unaryop(self, other, "SUB", False))
1702        else:
1703            raise TypeError("Other input is not a scantable or float value")
1704        s._add_history("operator -", varlist)
1705        print_log()
1706        return s
1707
1708    def __mul__(self, other):
1709        """
1710        implicit on all axes and on Tsys
1711        """
1712        varlist = vars()
1713        s = None
1714        if isinstance(other, scantable):
1715            s = scantable(self._math._binaryop(self, other, "MUL"))
1716        elif isinstance(other, float):
1717            s = scantable(self._math._unaryop(self, other, "MUL", False))
1718        else:
1719            raise TypeError("Other input is not a scantable or float value")
1720        s._add_history("operator *", varlist)
1721        print_log()
1722        return s
1723
1724
1725    def __div__(self, other):
1726        """
1727        implicit on all axes and on Tsys
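        Example:
            # illustrative only: divide all spectra by a factor of 2
            halved = scan / 2.0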
1728        """
1729        varlist = vars()
1730        s = None
1731        if isinstance(other, scantable):
1732            s = scantable(self._math._binaryop(self, other, "DIV"))
1733        elif isinstance(other, float):
1734            if other == 0.0:
1735                raise ZeroDivisionError("Dividing by zero is not recommended")
1736            s = scantable(self._math._unaryop(self, other, "DIV", False))
1737        else:
1738            raise TypeError("Other input is not a scantable or float value")
1739        s._add_history("operator /", varlist)
1740        print_log()
1741        return s
1742
1743    def get_fit(self, row=0):
1744        """
1745        Print or return the stored fits for a row in the scantable
1746        Parameters:
1747            row:    the row which the fit has been applied to.
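        Example:
            # illustrative only: print (or return) the fit stored for row 3
            scan.get_fit(3)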
1748        """
1749        if row >= self.nrow():
1750            return
1751        from asap.asapfit import asapfit
1752        fit = asapfit(self._getfit(row))
1753        if rcParams['verbose']:
1754            print fit
1755            return
1756        else:
1757            return fit.as_dict()
1758
1759    def flag_nans(self):
1760        """
1761        Utility function to flag NaN values in the scantable.
1762        """
1763        import numpy
1764        basesel = self.get_selection()
1765        for i in range(self.nrow()):
1766            sel = selector()+basesel
1767            sel.set_scans(self.getscan(i))
1768            sel.set_beams(self.getbeam(i))
1769            sel.set_ifs(self.getif(i))
1770            sel.set_polarisations(self.getpol(i))
1771            self.set_selection(sel)
1772            nans = numpy.isnan(self._getspectrum(0))
1773            if numpy.any(nans):
1774                bnans = [bool(v) for v in nans]
1775                self.flag(bnans)
1776        self.set_selection(basesel)
1777
1778
1779    def _add_history(self, funcname, parameters):
1780        if not rcParams['scantable.history']:
1781            return
1782        # create date
1783        sep = "##"
1784        from datetime import datetime
1785        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1786        hist = dstr+sep
1787        hist += funcname+sep
1788        if parameters.has_key('self'): del parameters['self']
1789        for k, v in parameters.iteritems():
1790            if type(v) is dict:
1791                for k2, v2 in v.iteritems():
1792                    hist += k2
1793                    hist += "="
1794                    if isinstance(v2, scantable):
1795                        hist += 'scantable'
1796                    elif k2 == 'mask':
1797                        if isinstance(v2, list) or isinstance(v2, tuple):
1798                            hist += str(self._zip_mask(v2))
1799                        else:
1800                            hist += str(v2)
1801                    else:
1802                        hist += str(v2)
1803            else:
1804                hist += k
1805                hist += "="
1806                if isinstance(v, scantable):
1807                    hist += 'scantable'
1808                elif k == 'mask':
1809                    if isinstance(v, list) or isinstance(v, tuple):
1810                        hist += str(self._zip_mask(v))
1811                    else:
1812                        hist += str(v)
1813                else:
1814                    hist += str(v)
1815            hist += sep
1816        hist = hist[:-2] # remove trailing '##'
1817        self._addhistory(hist)
1818
1819
1820    def _zip_mask(self, mask):
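        # compress a boolean channel mask into a list of [start, end)
        # index pairs covering the runs of True (selected) channels;
        # used to keep the stored history strings compact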
1821        mask = list(mask)
1822        i = 0
1823        segments = []
1824        while mask[i:].count(1):
1825            i += mask[i:].index(1)
1826            if mask[i:].count(0):
1827                j = i + mask[i:].index(0)
1828            else:
1829                j = len(mask)
1830            segments.append([i, j])
1831            i = j
1832        return segments
1833
1834    def _get_ordinate_label(self):
1835        fu = "("+self.get_fluxunit()+")"
1836        import re
1837        lbl = "Intensity"
1838        if re.match(".K.", fu):
1839            lbl = "Brightness Temperature "+ fu
1840        elif re.match(".Jy.", fu):
1841            lbl = "Flux density "+ fu
1842        return lbl
1843
1844    def _check_ifs(self):
1845        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1846        nchans = filter(lambda t: t > 0, nchans)
1847        return (sum(nchans)/len(nchans) == nchans[0])
1848
1849    def _fill(self, names, unit, average):
1850        import os
1851        from asap._asap import stfiller
1852        first = True
1853        fullnames = []
1854        for name in names:
1855            name = os.path.expandvars(name)
1856            name = os.path.expanduser(name)
1857            if not os.path.exists(name):
1858                msg = "File '%s' does not exist" % (name)
1859                if rcParams['verbose']:
1860                    asaplog.push(msg)
1861                    print asaplog.pop().strip()
1862                    return
1863                raise IOError(msg)
1864            fullnames.append(name)
1865        if average:
1866            asaplog.push('Auto averaging integrations')
1867        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1868        for name in fullnames:
1869            tbl = Scantable(stype)
1870            r = stfiller(tbl)
1871            rx = rcParams['scantable.reference']
1872            r._setreferenceexpr(rx)
1873            msg = "Importing %s..." % (name)
1874            asaplog.push(msg, False)
1875            print_log()
1876            r._open(name, -1, -1)
1877            r._read()
1878            if average:
1879                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1880            if not first:
1881                tbl = self._math._merge([self, tbl])
1882            Scantable.__init__(self, tbl)
1883            r._close()
1884            del r, tbl
1885            first = False
1886        if unit is not None:
1887            self.set_fluxunit(unit)
1888        self.set_freqframe(rcParams['scantable.freqframe'])
1889
1890    def __getitem__(self, key):
1891        if key < 0:
1892            key += self.nrow()
1893        if key >= self.nrow():
1894            raise IndexError("Row index out of range.")
1895        return self._getspectrum(key)
1896
1897    def __setitem__(self, key, value):
1898        if key < 0:
1899            key += self.nrow()
1900        if key >= self.nrow():
1901            raise IndexError("Row index out of range.")
1902        if not hasattr(value, "__len__") or \
1903                len(value) > self.nchan(self.getif(key)):
1904            raise ValueError("Spectrum length doesn't match.")
1905        return self._setspectrum(value, key)
1906
1907    def __len__(self):
1908        return self.nrow()
1909
1910    def __iter__(self):
1911        for i in range(len(self)):
1912            yield self[i]