source: trunk/python/scantable.py @ 1600

Last change on this file since 1600 was 1600, checked in by Malte Marquarding, 15 years ago

Ticket #170: python derived class for nice access e.g. units and doc strings

from asap._asap import Scantable
from asap import rcParams
from asap import print_log, print_log_dec
from asap import asaplog
from asap import selector
from asap import linecatalog
from asap.coordinate import coordinate
from asap import _n_bools, mask_not, mask_and, mask_or

class scantable(Scantable):
    """
        The ASAP container for scans
    """
    @print_log_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None):
        """
        Create a scantable from a saved one or make a reference
        Parameters:
            filename:    the name of an asap table on disk
                         or
                         the name of a rpfits/sdfits/ms file
                         (integrations within scans are auto averaged
                         and the whole file is read)
                         or
                         [advanced] a reference to an existing
                         scantable
            average:     average all integrations within a scan on read.
                         The default (True) is taken from .asaprc.
            unit:        brightness unit; must be consistent with K or Jy.
                         Over-rides the default selected by the reader
                         (input rpfits/sdfits/ms) or replaces the value
                         in existing scantables
            parallactify: Indicate that the data has been parallactified.
                          Default is taken from the rc file.
        """
        if average is None:
            average = rcParams['scantable.autoaverage']
        parallactify = parallactify or rcParams['scantable.parallactify']
        varlist = vars()
        from asap._asap import stmath
        self._math = stmath()
        if isinstance(filename, Scantable):
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):# or \
#                (isinstance(filename, list) or isinstance(filename, tuple)) \
#                  and isinstance(filename[-1], str):
                import os.path
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    if rcParams['verbose']:
                        asaplog.push(s)
                        #print asaplog.pop().strip()
                        return
                    raise IOError(s)
                if os.path.isdir(filename) \
                    and not os.path.exists(filename+'/table.f1'):
                    # crude check if asap table
                    if os.path.exists(filename+'/table.info'):
                        ondisk = rcParams['scantable.storage'] == 'disk'
                        Scantable.__init__(self, filename, ondisk)
                        if unit is not None:
                            self.set_fluxunit(unit)
                        self.set_freqframe(rcParams['scantable.freqframe'])
                    else:
                        msg = "The given file '%s' is not a valid " \
                              "asap table." % (filename)
                        if rcParams['verbose']:
                            print msg
                            return
                        else:
                            raise IOError(msg)
                else:
                    self._fill([filename], unit, average)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)

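    # Usage sketch (not part of the original source; the file name below is a
    # hypothetical example):
    #
    #   from asap import scantable
    #   scan = scantable('mydata.rpf', average=True)   # read and auto-average
    #   ref = scantable(scan)                          # [advanced] reference to an existing scantable
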
    @print_log_dec
    def save(self, name=None, format=None, overwrite=False):
        """
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.
        Parameters:
            name:        the name of the output file. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)
            format:      an optional file format. Default is ASAP.
                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
                                       'SDFITS' (save as SDFITS file)
                                       'ASCII' (saves as ascii text file)
                                       'MS2' (saves as an aips++
                                              MeasurementSet V2)
                                       'FITS' (save as image FITS - not
                                               readable by class)
                                       'CLASS' (save as FITS readable by CLASS)
            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.
        Example:
            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')
        """
        from os import path
        format = format or rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                if rcParams['verbose']:
                    print msg
                    return
                else:
                    raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            self._save(name)
        else:
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return

    def copy(self):
        """
        Return a copy of this scantable.
        Note:
            This makes a full (deep) copy. scan2 = scan1 makes a reference.
        Parameters:
            none
        Example:
            copiedscan = scan.copy()
        """
        sd = scantable(Scantable._copy(self))
        return sd

    def drop_scan(self, scanid=None):
        """
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.
        Parameters:
            scanid:    a (list of) scan number(s)
        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            if rcParams['verbose']:
                print "Please specify a scanno to drop from the scantable"
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            if rcParams['verbose']:
                print "Couldn't find any match."
                return
            else: raise
        try:
            sel = selector(scans=allscans)
            return self._select_copy(sel)
        except RuntimeError:
            if rcParams['verbose']:
                print "Couldn't find any match."
            else:
                raise

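    # Usage sketch (not part of the original source; the scan numbers are
    # hypothetical):
    #
    #   trimmed = scan.drop_scan(1)           # drop scan number 1
    #   trimmed = scan.drop_scan([2, 4, 6])   # drop several scans at once
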
    def _select_copy(self, selection):
        orig = self.get_selection()
        self.set_selection(orig+selection)
        cp = self.copy()
        self.set_selection(orig)
        return cp

    def get_scan(self, scanid=None):
        """
        Return a specific scan (by scanno) or collection of scans (by
        source name) in a new scantable.
        Note:
            See scantable.drop_scan() for the inverse operation.
        Parameters:
            scanid:    a (list of) scanno or a source name, unix-style
                       patterns are accepted for source name matching, e.g.
                       '*_R' gets all 'ref' scans
        Example:
            # get all scans containing the source '323p459'
            newscan = scan.get_scan('323p459')
            # get all 'off' scans
            refscans = scan.get_scan('*_R')
            # get a subset of scans by scanno (as listed in scan.summary())
            newscan = scan.get_scan([0, 2, 7, 10])
        """
        if scanid is None:
            if rcParams['verbose']:
                print "Please specify a scan no or name to " \
                      "retrieve from the scantable"
                return
            else:
                raise RuntimeError("No scan given")

        try:
            bsel = self.get_selection()
            sel = selector()
            if type(scanid) is str:
                sel.set_name(scanid)
                return self._select_copy(sel)
            elif type(scanid) is int:
                sel.set_scans([scanid])
                return self._select_copy(sel)
            elif type(scanid) is list:
                sel.set_scans(scanid)
                return self._select_copy(sel)
            else:
                msg = "Illegal scanid type, use 'int' or 'list' of ints."
                if rcParams['verbose']:
                    print msg
                else:
                    raise TypeError(msg)
        except RuntimeError:
            if rcParams['verbose']: print "Couldn't find any match."
            else: raise

    def __str__(self):
        return Scantable._summary(self, True)

    def summary(self, filename=None):
        """
        Print a summary of the contents of this scantable.
        Parameters:
            filename:    the name of a file to write the output to
                         Default - no file output
            verbose:     print extra info such as the frequency table
                         The default (False) is taken from .asaprc
        """
        info = Scantable._summary(self, True)
        if filename is not None:
            if filename == "":
                filename = 'scantable_summary.txt'
            from os.path import expandvars, isdir
            filename = expandvars(filename)
            if not isdir(filename):
                data = open(filename, 'w')
                data.write(info)
                data.close()
            else:
                msg = "Illegal file name '%s'." % (filename)
                if rcParams['verbose']:
                    print msg
                else:
                    raise IOError(msg)
        if rcParams['verbose']:
            try:
                from IPython.genutils import page as pager
            except ImportError:
                from pydoc import pager
            pager(info)
        else:
            return info

    def get_spectrum(self, rowno):
        """Return the spectrum for the given row in the scantable as a list.
        Parameters:
             rowno:   the row number to retrieve the spectrum from
        """
        return self._getspectrum(rowno)

    def get_mask(self, rowno):
        """Return the mask for the given row in the scantable as a list.
        Parameters:
             rowno:   the row number to retrieve the mask from
        """
        return self._getmask(rowno)

    def set_spectrum(self, spec, rowno):
        """Set the spectrum for the given row in the scantable.
        Parameters:
             spec:    the spectrum
             rowno:   the row number to set the spectrum for
        """
        assert(len(spec) == self.nchan())
        return self._setspectrum(spec, rowno)

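    # Usage sketch (not part of the original source): scale one spectrum and
    # write it back. Row 0 is an arbitrary example row.
    #
    #   spec = scan.get_spectrum(0)
    #   spec = [2.0 * v for v in spec]
    #   scan.set_spectrum(spec, 0)
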
    def get_coordinate(self, rowno):
        """Return the (spectral) coordinate for a given 'rowno'.
        NOTE:
            * This coordinate is only valid until a scantable method modifies
              the frequency axis.
            * This coordinate contains the original frequency set-up,
              NOT the new frame. The conversions however are done using the user
              specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
              use scantable.freq_align first. Without it there is no closure,
              i.e.
              c = myscan.get_coordinate(0)
              c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()

        Parameters:
             rowno:    the row number for the spectral coordinate

        """
        return coordinate(Scantable.get_coordinate(self, rowno))

    def get_selection(self):
        """
        Get the selection object currently set on this scantable.
        Parameters:
            none
        Example:
            sel = scan.get_selection()
            sel.set_ifs(0)              # select IF 0
            scan.set_selection(sel)     # apply modified selection
        """
        return selector(self._getselection())

    def set_selection(self, selection=None, **kw):
        """
        Select a subset of the data. All following operations on this scantable
        are only applied to this selection.
        Parameters:
            selection:    a selector object (default unset the selection)
        Examples:
            sel = selector()          # create a selection object
            sel.set_scans([0, 3])     # select SCANNO 0 and 3
            scan.set_selection(sel)   # set the selection
            scan.summary()            # will only print summary of scanno 0 and 3
            scan.set_selection()      # unset the selection
        """
        if selection is None:
            # reset
            if len(kw) == 0:
                selection = selector()
            else:
                # try keywords
                for k in kw:
                    if k not in selector.fields:
                        raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
                selection = selector(**kw)
        self._setselection(selection)

    def stats(self, stat='stddev', mask=None):
        """
        Determine the specified statistic of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.
        Parameters:
            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
                     'var', 'stddev', 'avdev', 'rms', 'median'
            mask:    an optional mask specifying where the statistic
                     should be determined.
        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stats(stat='mean', mask=msk)
        """
        mask = mask or []
        if not self._check_ifs():
            raise ValueError("Cannot apply mask as the IFs have different "
                             "number of channels. Please use set_selection() "
                             "to select individual IFs")

        statvals = self._math._stats(self, mask, stat)
        def cb(i):
            return statvals[i]

        return self._row_callback(cb, stat)

    def stddev(self, mask=None):
        """
        Determine the standard deviation of the current beam/if/pol
        Takes a 'mask' as an optional parameter to specify which
        channels should be excluded.
        Parameters:
            mask:    an optional mask specifying where the standard
                     deviation should be determined.

        Example:
            scan.set_unit('channel')
            msk = scan.create_mask([100, 200], [500, 600])
            scan.stddev(mask=msk)
        """
        return self.stats(stat='stddev', mask=mask)


    def get_column_names(self):
        """
        Return a list of column names, which can be used for selection.
        """
        return list(Scantable.get_column_names(self))

    def get_tsys(self):
        """
        Return the System temperatures.
        Returns:
            a list of Tsys values for the current selection
        """

        return self._row_callback(self._gettsys, "Tsys")

    def _row_callback(self, callback, label):
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            if self.nbeam(-1) > 1:
                out +=  ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out +=  sep+'\n'
        if rcParams['verbose']:
            print sep
            print " %s" % (label)
            print sep
            print out
        return outvec

    def _get_column(self, callback, row=-1):
        """
        Return callback(row) for a single row, or a list of callback values
        for all rows if row == -1.
        """
        if row == -1:
            return [callback(i) for i in range(self.nrow())]
        else:
            if 0 <= row < self.nrow():
                return callback(row)


    def get_time(self, row=-1, asdatetime=False):
        """
        Get a list of time stamps for the observations.
        Return a datetime object for each integration time stamp in the scantable.
        Parameters:
            row:          row no of integration. Default -1 return all rows
            asdatetime:   return values as datetime objects rather than strings
        Example:
            none
        """
        from time import strptime
        from datetime import datetime
        times = self._get_column(self._gettime, row)
        if not asdatetime:
            return times
        format = "%Y/%m/%d/%H:%M:%S"
        if isinstance(times, list):
            return [datetime(*strptime(i, format)[:6]) for i in times]
        else:
            return datetime(*strptime(times, format)[:6])


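    # Usage sketch (not part of the original source):
    #
    #   stamps = scan.get_time()                     # all rows, as strings
    #   first = scan.get_time(0, asdatetime=True)    # row 0 as a datetime object
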
    def get_inttime(self, row=-1):
        """
        Get a list of integration times for the observations.
        Return a time in seconds for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getinttime, row)


    def get_sourcename(self, row=-1):
        """
        Get a list of source names for the observations.
        Return a string for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getsourcename, row)

    def get_elevation(self, row=-1):
        """
        Get a list of elevations for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getelevation, row)

    def get_azimuth(self, row=-1):
        """
        Get a list of azimuths for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getazimuth, row)

    def get_parangle(self, row=-1):
        """
        Get a list of parallactic angles for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows.
        Example:
            none
        """
        return self._get_column(self._getparangle, row)

    def get_direction(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a string for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows
        Example:
            none
        """
        return self._get_column(self._getdirection, row)

    def get_directionval(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows
        Example:
            none
        """
        return self._get_column(self._getdirectionvec, row)


    def set_unit(self, unit='channel'):
        """
        Set the unit for all following operations on this scantable
        Parameters:
            unit:    optional unit, default is 'channel'
                     one of '*Hz', 'km/s', 'channel', ''
        """
        varlist = vars()
        if unit in ['', 'pixel', 'channel']:
            unit = ''
        inf = list(self._getcoordinfo())
        inf[0] = unit
        self._setcoordinfo(inf)
        self._add_history("set_unit", varlist)

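    # Usage sketch (not part of the original source):
    #
    #   scan.set_unit('km/s')   # abcissa in velocity for subsequent operations
    #   scan.set_unit('GHz')    # or any '*Hz' unit
    #   scan.set_unit()         # back to channels
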
    @print_log_dec
    def set_instrument(self, instr):
        """
        Set the instrument for subsequent processing.
        Parameters:
            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
        """
        self._setInstrument(instr)
        self._add_history("set_instrument", vars())

    @print_log_dec
    def set_feedtype(self, feedtype):
        """
        Overwrite the feed type, which might not be set correctly.
        Parameters:
            feedtype:     'linear' or 'circular'
        """
        self._setfeedtype(feedtype)
        self._add_history("set_feedtype", vars())

    @print_log_dec
    def set_doppler(self, doppler='RADIO'):
        """
        Set the doppler for all following operations on this scantable.
        Parameters:
            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
        """
        varlist = vars()
        inf = list(self._getcoordinfo())
        inf[2] = doppler
        self._setcoordinfo(inf)
        self._add_history("set_doppler", vars())

    @print_log_dec
    def set_freqframe(self, frame=None):
        """
        Set the frame type of the Spectral Axis.
        Parameters:
            frame:   an optional frame type, default 'LSRK'. Valid frames are:
                     'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
                     'GEO', 'GALACTO', 'LGROUP', 'CMB'
        Examples:
            scan.set_freqframe('BARY')
        """
        frame = frame or rcParams['scantable.freqframe']
        varlist = vars()
        valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
                   'GEO', 'GALACTO', 'LGROUP', 'CMB']

        if frame in valid:
            inf = list(self._getcoordinfo())
            inf[1] = frame
            self._setcoordinfo(inf)
            self._add_history("set_freqframe", varlist)
        else:
            msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
            if rcParams['verbose']:
                print msg
            else:
                raise TypeError(msg)

    def set_dirframe(self, frame=""):
        """
        Set the frame type of the Direction on the sky.
        Parameters:
            frame:   an optional frame type, default ''. Valid frames are:
                     'J2000', 'B1950', 'GALACTIC'
        Examples:
            scan.set_dirframe('GALACTIC')
        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
            else:
                raise
        self._add_history("set_dirframe", varlist)

    def get_unit(self):
        """
        Get the default unit set in this scantable
        Returns:
            A unit string
        """
        inf = self._getcoordinfo()
        unit = inf[0]
        if unit == '': unit = 'channel'
        return unit

    def get_abcissa(self, rowno=0):
        """
        Get the abcissa in the current coordinate setup for the currently
        selected Beam/IF/Pol
        Parameters:
            rowno:    an optional row number in the scantable. Default is the
                      first row, i.e. rowno=0
        Returns:
            The abcissa values and the format string (as a tuple)
        """
        abc = self._getabcissa(rowno)
        lbl = self._getabcissalabel(rowno)
        return abc, lbl

    def flag(self, mask=None):
        """
        Flag the selected data using an optional channel mask.
        Parameters:
            mask:   an optional channel mask, created with create_mask. Default
                    (no mask) is all channels.
        """
        varlist = vars()
        mask = mask or []
        try:
            self._flag(mask)
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        self._add_history("flag", varlist)

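    # Usage sketch (not part of the original source; the channel window is an
    # arbitrary example):
    #
    #   msk = scan.create_mask([0, 100])   # channels 0-100
    #   scan.flag(mask=msk)                # flag using that channel mask
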
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.
        Parameters:
            start:    the start frequency (really a period within the
                      bandwidth) or period to remove
            end:      the end frequency or period to remove
            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels
        Notes:
            It is recommended to flag edges of the band or strong
            signals beforehand.
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s

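    # Usage sketch (not part of the original source; the 1.2-1.3 MHz window is
    # a hypothetical standing-wave period to remove):
    #
    #   cleaned = scan.lag_flag(1.2, 1.3, unit="MHz", insitu=False)
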
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked
            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED
            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            # mask only channel 400
            msk = scan.create_mask([400])
        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args' - UGLY!!!

        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if len(window) == 1:
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk

    def get_restfreqs(self):
        """
        Get the restfrequency(s) stored in this scantable.
        The return value(s) are always of unit 'Hz'
        Parameters:
            none
        Returns:
            a list of doubles
        """
        return list(self._getrestfreqs())


    def set_restfreqs(self, freqs=None, unit='Hz'):
        """
        Set or replace the restfrequency(s) stored in this scantable.
        If the 'freqs' argument holds a scalar,
        then that rest frequency will be applied to all the selected
        data.  If the 'freqs' argument holds
        a vector, then it MUST be of equal or smaller length than
        the number of IFs (and the available restfrequencies will be
        replaced by this vector).  In this case, *all* data have
        the restfrequency set per IF according
        to the corresponding value you give in the 'freqs' vector.
        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
        IF 1 gets restfreq 2e9.
        You can also specify the frequencies via a linecatalog.

        Parameters:
            freqs:   list of rest frequency values or string identifiers
            unit:    unit for rest frequency (default 'Hz')

        Example:
            # set the given restfrequency for the whole table
            scan.set_restfreqs(freqs=1.4e9)
            # If the number of IFs in the data is >= 2, IF0 gets the first
            # value, IF1 the second...
            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
            # set the given restfrequency for the whole table (by name)
            scan.set_restfreqs(freqs="OH1667")

        Note:
            To do more sophisticated rest frequency setting, e.g. on a
            source and IF basis, use scantable.set_selection() before using
            this function.
            # provided your scantable is called scan
            selection = selector()
            selection.set_name("ORION*")
            selection.set_ifs([1])
            scan.set_selection(selection)
            scan.set_restfreqs(freqs=86.6e9)

        """
        varlist = vars()
        from asap import linecatalog
        # simple value
        if isinstance(freqs, int) or isinstance(freqs, float):
            self._setrestfreqs(freqs, "", unit)
        # list of values
        elif isinstance(freqs, list) or isinstance(freqs, tuple):
            # list values are scalars
            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i], "", unit)
                self._setselection(savesel)
            # list values are dicts, {'value': ..., 'name': ...}
            elif isinstance(freqs[-1], dict):
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i]["value"],
                                       freqs[i]["name"], "MHz")
                self._setselection(savesel)
        # freqs are to be taken from a linecatalog
        elif isinstance(freqs, linecatalog):
            sel = selector()
            savesel = self._getselection()
            iflist = self.getifnos()
            for i in xrange(freqs.nrow()):
                sel.set_ifs(iflist[i])
                self._setselection(sel)
                self._setrestfreqs(freqs.get_frequency(i),
                                   freqs.get_name(i), "MHz")
                # ensure that we are not iterating past nIF
                if i == self.nif()-1: break
            self._setselection(savesel)
        else:
            return
        self._add_history("set_restfreqs", varlist)

    def shift_refpix(self, delta):
        """
        Shift the reference pixel of the Spectral Coordinate by an
        integer amount.
        Parameters:
            delta:   the amount to shift by
        Note:
            Be careful using this with broadband data.
        """
        Scantable.shift(self, delta)

    def history(self, filename=None):
        """
        Print the history. Optionally to a file.
        Parameters:
            filename:    The name of the file to save the history to.
        """
        hist = list(self._gethistory())
        out = "-"*80
        for h in hist:
            if h.startswith("---"):
                out += "\n"+h
            else:
                items = h.split("##")
                date = items[0]
                func = items[1]
                items = items[2:]
                out += "\n"+date+"\n"
                out += "Function: %s\n  Parameters:" % (func)
                for i in items:
                    s = i.split("=")
                    out += "\n   %s = %s" % (s[0], s[1])
                out += "\n"+"-"*80
        if filename is not None:
            if filename == "":
                filename = 'scantable_history.txt'
            import os
            filename = os.path.expandvars(os.path.expanduser(filename))
            if not os.path.isdir(filename):
                data = open(filename, 'w')
                data.write(out)
                data.close()
            else:
                msg = "Illegal file name '%s'." % (filename)
                if rcParams['verbose']:
                    print msg
                else:
                    raise IOError(msg)
        if rcParams['verbose']:
            try:
                from IPython.genutils import page as pager
            except ImportError:
                from pydoc import pager
            pager(out)
        else:
            return out
        return
    #
    # Maths business
    #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together
            weight:   Weighting scheme.
                      'none'     (mean, no weighting)
                      'var'      (1/var(spec) weighted)
                      'tsys'     (1/Tsys**2 weighted)
                      'tint'     (integration time weighted)
                      'tintsys'  (Tint/Tsys**2)
                      'median'   (median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        weight = weight or 'TINT'
        mask = mask or ()
        scanav = (scanav and 'SCAN') or 'NONE'
        scan = (self, )
        try:
            if align:
                scan = (self.freq_align(insitu=False), )
            s = None
            if weight.upper() == 'MEDIAN':
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                              scanav))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("average_time", varlist)
        return s

    @print_log_dec
    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
        """
        Return a scan where all spectra are converted to either
        Jansky or Kelvin depending upon the flux units of the scan table.
        By default the function tries to look the values up internally.
        If it can't find them (or if you want to over-ride), you must
        specify EITHER jyperk OR eta (and D which it will try to look up
        also if you don't set it). jyperk takes precedence if you set both.
        Parameters:
            jyperk:      the Jy / K conversion factor
            eta:         the aperture efficiency
            d:           the geometric diameter (metres)
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        jyperk = jyperk or -1.0
        d = d or -1.0
        eta = eta or -1.0
        s = scantable(self._math._convertflux(self, d, eta, jyperk))
        s._add_history("convert_flux", varlist)
        if insitu: self._assign(s)
        else: return s

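    # Usage sketch (not part of the original source; the Jy/K factor, efficiency
    # and dish diameter are placeholder numbers):
    #
    #   jy = scan.convert_flux(jyperk=12.5, insitu=False)          # explicit factor
    #   jy = scan.convert_flux(eta=0.58, d=64.0, insitu=False)     # via efficiency
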
    @print_log_dec
    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
        """
        Return a scan after applying a gain-elevation correction.
        The correction can be made via either a polynomial or a
        table-based interpolation (and extrapolation if necessary).
        You specify polynomial coefficients, an ascii table or neither.
        If you specify neither, then a polynomial correction will be made
        with built in coefficients known for certain telescopes (an error
        will occur if the instrument is not known).
        The data and Tsys are *divided* by the scaling factors.
        Parameters:
            poly:        Polynomial coefficients (default None) to compute a
                         gain-elevation correction as a function of
                         elevation (in degrees).
            filename:    The name of an ascii file holding correction factors.
                         The first row of the ascii file must give the column
                         names and these MUST include columns
                         "ELEVATION" (degrees) and "FACTOR" (multiply data
                         by this) somewhere.
                         The second row must give the data type of the
                         column. Use 'R' for Real and 'I' for Integer.
                         An example file would be
                         (actual factors are arbitrary) :

                         TIME ELEVATION FACTOR
                         R R R
                         0.1 0 0.8
                         0.2 20 0.85
                         0.3 40 0.9
                         0.4 60 0.85
                         0.5 80 0.8
                         0.6 90 0.75
            method:      Interpolation method when correcting from a table.
                         Values are  "nearest", "linear" (default), "cubic"
                         and "spline"
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """

        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        poly = poly or ()
        from os.path import expandvars
        filename = expandvars(filename)
        s = scantable(self._math._gainel(self, poly, filename, method))
        s._add_history("gain_el", varlist)
        if insitu:
            self._assign(s)
        else:
            return s

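    # Usage sketch (not part of the original source; the coefficients and file
    # name are placeholder examples):
    #
    #   g = scan.gain_el(poly=[1.0, -0.005], insitu=False)            # polynomial
    #   g = scan.gain_el(filename='gaintable.txt', method='cubic',
    #                    insitu=False)                                # ascii table
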
    @print_log_dec
    def freq_align(self, reftime=None, method='cubic', insitu=None):
        """
        Return a scan where all rows have been aligned in frequency/velocity.
        The alignment frequency frame (e.g. LSRK) is that set by function
        set_freqframe.
        Parameters:
            reftime:     reference time to align at. By default, the time of
                         the first row of data is used.
            method:      Interpolation method for regridding the spectra.
                         Choose from "nearest", "linear", "cubic" (default)
                         and "spline"
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """
        if insitu is None: insitu = rcParams["insitu"]
        self._math._setinsitu(insitu)
        varlist = vars()
        reftime = reftime or ""
        s = scantable(self._math._freq_align(self, reftime, method))
        s._add_history("freq_align", varlist)
        if insitu: self._assign(s)
        else: return s

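    # Usage sketch (not part of the original source):
    #
    #   scan.set_freqframe('LSRK')                 # frame to align in
    #   aligned = scan.freq_align(insitu=False)    # align to the first row's time
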
    @print_log_dec
    def opacity(self, tau, insitu=None):
        """
        Apply an opacity correction. The data
        and Tsys are multiplied by the correction factor.
        Parameters:
            tau:         Opacity from which the correction factor is
                         exp(tau*ZD)
                         where ZD is the zenith-distance
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._opacity(self, tau))
        s._add_history("opacity", varlist)
        if insitu: self._assign(s)
        else: return s

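    # Usage sketch (not part of the original source; tau=0.09 is a placeholder
    # zenith opacity):
    #
    #   corrected = scan.opacity(0.09, insitu=False)
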
    @print_log_dec
    def bin(self, width=5, insitu=None):
        """
        Return a scan where all spectra have been binned up.
        Parameters:
            width:       The bin width (default=5) in pixels
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._bin(self, width))
        s._add_history("bin", varlist)
        if insitu:
            self._assign(s)
        else:
            return s

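    # Usage sketch (not part of the original source):
    #
    #   binned = scan.bin(width=10, insitu=False)   # 10-channel bins
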
    @print_log_dec
    def resample(self, width=5, method='cubic', insitu=None):
        """
        Return a scan where all spectra have been binned up, using
        interpolation to resample onto the new channel grid.

        Parameters:
            width:       The bin width (default=5) in pixels
            method:      Interpolation method used when resampling.
                         Values are  "nearest", "linear", "cubic" (default)
                         and "spline"
            insitu:      if False a new scantable is returned.
                         Otherwise, the scaling is done in-situ
                         The default is taken from .asaprc (False)
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._resample(self, method, width))
        s._add_history("resample", varlist)
        if insitu: self._assign(s)
        else: return s

    @print_log_dec
    def average_pol(self, mask=None, weight='none'):
        """
        Average the Polarisations together.
        Parameters:
            mask:        An optional mask defining the region, where the
                         averaging will be applied. The output will have all
                         specified points masked.
            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
                         weighted), or 'tsys' (1/Tsys**2 weighted)
        """
        varlist = vars()
        mask = mask or ()
        s = scantable(self._math._averagepol(self, mask, weight.upper()))
        s._add_history("average_pol", varlist)
        return s

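    # Usage sketch (not part of the original source):
    #
    #   ipol = scan.average_pol(weight='tsys')   # Tsys-weighted polarisation average
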
    @print_log_dec
    def average_beam(self, mask=None, weight='none'):
        """
        Average the Beams together.
        Parameters:
            mask:        An optional mask defining the region, where the
                         averaging will be applied. The output will have all
                         specified points masked.
            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
                         weighted), or 'tsys' (1/Tsys**2 weighted)
        """
        varlist = vars()
        mask = mask or ()
        s = scantable(self._math._averagebeams(self, mask, weight.upper()))
        s._add_history("average_beam", varlist)
        return s

    def parallactify(self, pflag):
        """Set the flag indicating whether the data has been parallactified."""
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)

    @print_log_dec
    def convert_pol(self, poltype=None):
        """
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.
        Parameters:
            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"
        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            if rcParams['verbose']:
                print msg
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        return s

    @print_log_dec
    def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
        """
        Smooth the spectrum by the specified kernel (conserving flux).
        Parameters:
            kernel:     The type of smoothing kernel. Select from
                        'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
                        or 'poly'
            width:      The width of the kernel in pixels. For hanning this is
                        ignored otherwise it defaults to 5 pixels.
                        For 'gaussian' it is the Full Width Half
                        Maximum. For 'boxcar' it is the full width.
                        For 'rmedian' and 'poly' it is the half width.
            order:      Optional parameter for 'poly' kernel (default is 2), to
                        specify the order of the polynomial. Ignored by all other
                        kernels.
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
             none
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        s = scantable(self._math._smooth(self, kernel.lower(), width, order))
        s._add_history("smooth", varlist)
        if insitu: self._assign(s)
        else: return s

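    # Usage sketch (not part of the original source; the widths are placeholder
    # values):
    #
    #   sm = scan.smooth(kernel='boxcar', width=3, insitu=False)
    #   sm = scan.smooth(kernel='gaussian', width=10.0, insitu=False)   # FWHM in pixels
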
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this case each
                        individual fit has to be approved, by typing 'y'
                        or 'n'
            uselin:     use linear polynomial fit
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()
        if mask is None:
            mask = [True for i in xrange(self.nchan(-1))]
        from asap.asapfitter import fitter
        try:
            f = fitter()
            f.set_scan(self, mask)
            #f.set_function(poly=order)
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)
            s = f.auto_fit(insitu, plot=plot)
            s._add_history("poly_baseline", varlist)
            if insitu: self._assign(s)
            else: return s
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            if rcParams['verbose']:
                print msg
                return
            else:
                raise RuntimeError(msg)

1297    def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1298                           threshold=3, chan_avg_limit=1, plot=False,
1299                           insitu=None):
1300        """
1301        Return a scan which has been baselined (all rows) by a polynomial.
1302        Spectral lines are detected first using linefinder and masked out
1303        to avoid them affecting the baseline solution.
1304
1305        Parameters:
1306            mask:       an optional mask retreived from scantable
1307            edge:       an optional number of channel to drop at
1308                        the edge of spectrum. If only one value is
1309                        specified, the same number will be dropped from
1310                        both sides of the spectrum. Default is to keep
1311                        all channels. Nested tuples represent individual
1312                        edge selection for different IFs (a number of spectral
1313                        channels can be different)
1314            order:      the order of the polynomial (default is 0)
1315            threshold:  the threshold used by line finder. It is better to
1316                        keep it large as only strong lines affect the
1317                        baseline solution.
1318            chan_avg_limit:
1319                        a maximum number of consequtive spectral channels to
1320                        average during the search of weak and broad lines.
1321                        The default is no averaging (and no search for weak
1322                        lines). If such lines can affect the fitted baseline
1323                        (e.g. a high order polynomial is fitted), increase this
1324                        parameter (usually values up to 8 are reasonable). Most
1325                        users of this method should find the default value
1326                        sufficient.
1327            plot:       plot the fit and the residual. In this each
1328                        indivual fit has to be approved, by typing 'y'
1329                        or 'n'
1330            insitu:     if False a new scantable is returned.
1331                        Otherwise, the scaling is done in-situ
1332                        The default is taken from .asaprc (False)
1333
1334        Example:
1335            scan2=scan.auto_poly_baseline(order=7)
1336        """
1337        if insitu is None: insitu = rcParams['insitu']
1338        varlist = vars()
1339        from asap.asapfitter import fitter
1340        from asap.asaplinefind import linefinder
1341        from asap import _is_sequence_or_number as _is_valid
1342
1343        # check whether edge is set up for each IF individually
1344        individualedge = False
1345        if len(edge) > 1:
1346            if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1347                individualedge = True
1348
1349        if not _is_valid(edge, int) and not individualedge:
1350            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1351                              "pair of integers specified as a tuple. Nested tuples " \
1352                              "are allowed to make individual selection for different IFs."
1353
1354        curedge = (0, 0)
1355        if individualedge:
1356            for edgepar in edge:
1357                if not _is_valid(edgepar, int):
1358                    raise ValueError, "Each element of the 'edge' tuple has " \
1359                                      "to be a pair of integers or an integer."
1360        else:
1361            curedge = edge
1362
1363        # setup fitter
1364        f = fitter()
1365        f.set_function(poly=order)
1366
1367        # setup line finder
1368        fl = linefinder()
1369        fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
1370
1371        if not insitu:
1372            workscan = self.copy()
1373        else:
1374            workscan = self
1375
1376        fl.set_scan(workscan)
1377
1378        rows = range(workscan.nrow())
1379        asaplog.push("Processing:")
1380        for r in rows:
1381            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1382                (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1383                 workscan.getpol(r), workscan.getcycle(r))
1384            asaplog.push(msg, False)
1385
1386            # figure out edge parameter
1387            if individualedge:
1388                if len(edge) <= workscan.getif(r):
1389                    raise RuntimeError, "Number of edge elements appears to " \
1390                                        "be less than the number of IFs"
1391                curedge = edge[workscan.getif(r)]
1392
1393            # run the line finder on this row
1394            fl.find_lines(r, mask, curedge)
1395            f.set_data(workscan._getabcissa(r),  workscan._getspectrum(r),
1396                        mask_and(workscan._getmask(r), fl.get_mask()))
1397            f.fit()
1398            x = f.get_parameters()
1399            if plot:
1400                f.plot(residual=True)
1401                x = raw_input("Accept fit ( [y]/n ): ")
1402                if x.upper() == 'N':
1403                    continue
1404            workscan._setspectrum(f.fitter.getresidual(), r)
1405        if plot:
1406            f._p.unmap()
1407            f._p = None
1408        workscan._add_history("auto_poly_baseline", varlist)
1409        if insitu:
1410            self._assign(workscan)
1411        else:
1412            return workscan
1413
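    # Hedged usage sketch for auto_poly_baseline (not part of the original
    # source); 'scan' is an assumed scantable with two IFs. Nested tuples in
    # 'edge' give each IF its own edge-channel rejection, and chan_avg_limit
    # enables channel averaging so that weak, broad lines are also masked
    # before the baseline is fitted:
    #
    #   scan2 = scan.auto_poly_baseline(order=3, edge=((50, 50), (30, 80)),
    #                                   threshold=5, chan_avg_limit=4)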
1414    @print_log_dec
1415    def rotate_linpolphase(self, angle):
1416        """
1417        Rotate the phase of the complex polarization O=Q+iU correlation.
1418        This is always done in situ in the raw data.  So if you call this
1419        function more than once then each call rotates the phase further.
1420        Parameters:
1421            angle:   The angle (degrees) to rotate (add) by.
1422        Examples:
1423            scan.rotate_linpolphase(2.3)
1424        """
1425        varlist = vars()
1426        self._math._rotate_linpolphase(self, angle)
1427        self._add_history("rotate_linpolphase", varlist)
1428        return
1429
1430    @print_log_dec
1431    def rotate_xyphase(self, angle):
1432        """
1433        Rotate the phase of the XY correlation.  This is always done in situ
1434        in the data.  So if you call this function more than once
1435        then each call rotates the phase further.
1436        Parameters:
1437            angle:   The angle (degrees) to rotate (add) by.
1438        Examples:
1439            scan.rotate_xyphase(2.3)
1440        """
1441        varlist = vars()
1442        self._math._rotate_xyphase(self, angle)
1443        self._add_history("rotate_xyphase", varlist)
1444        return
1445
1446    @print_log_dec
1447    def swap_linears(self):
1448        """
1449        Swap the linear polarisations XX and YY, or more precisely the first
1450        two polarisations, as this also works for circulars.
1451        """
1452        varlist = vars()
1453        self._math._swap_linears(self)
1454        self._add_history("swap_linears", varlist)
1455        return
1456
1457    @print_log_dec
1458    def invert_phase(self):
1459        """
1460        Invert the phase of the complex polarisation
1461        """
1462        varlist = vars()
1463        self._math._invert_phase(self)
1464        self._add_history("invert_phase", varlist)
1465        return
1466
1467    @print_log_dec
1468    def add(self, offset, insitu=None):
1469        """
1470        Return a scan where all spectra have the offset added
1471        Parameters:
1472            offset:      the offset
1473            insitu:      if False a new scantable is returned.
1474                         Otherwise, the scaling is done in-situ
1475                         The default is taken from .asaprc (False)
1476        """
1477        if insitu is None: insitu = rcParams['insitu']
1478        self._math._setinsitu(insitu)
1479        varlist = vars()
1480        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1481        s._add_history("add", varlist)
1482        if insitu:
1483            self._assign(s)
1484        else:
1485            return s
1486
1487    @print_log_dec
1488    def scale(self, factor, tsys=True, insitu=None):
1489        """
1490        Return a scan where all spectra are scaled by the given 'factor'
1491        Parameters:
1492            factor:      the scaling factor
1493            insitu:      if False a new scantable is returned.
1494                         Otherwise, the scaling is done in-situ
1495                         The default is taken from .asaprc (False)
1496            tsys:        if True (default) then apply the operation to Tsys
1497                         as well as the data
1498        """
1499        if insitu is None: insitu = rcParams['insitu']
1500        self._math._setinsitu(insitu)
1501        varlist = vars()
1502        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1503        s._add_history("scale", varlist)
1504        if insitu:
1505            self._assign(s)
1506        else:
1507            return s
1508
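    # Hedged sketch of the add/scale in-situ convention (not part of the
    # original source); 'scan' is an assumed scantable. With insitu=False a
    # new scantable is returned and the original is untouched; with
    # insitu=True the method modifies 'scan' itself and returns nothing:
    #
    #   shifted = scan.add(10.0, insitu=False)     # new scantable
    #   scan.scale(1.25, tsys=True, insitu=True)   # modifies scan in place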
1509    def set_sourcetype(self, match, matchtype="pattern",
1510                       sourcetype="reference"):
1511        """
1512        Set the type of the source to be a source or reference scan
1513        using the provided pattern.
1514        Parameters:
1515            match:          a Unix style pattern, regular expression or selector
1516            matchtype:      'pattern' (default) UNIX style pattern or
1517                            'regex' regular expression
1518            sourcetype:     the type of the source to use (source/reference)
1519        """
1520        varlist = vars()
1521        basesel = self.get_selection()
1522        stype = -1
1523        if sourcetype.lower().startswith("r"):
1524            stype = 1
1525        elif sourcetype.lower().startswith("s"):
1526            stype = 0
1527        else:
1528            raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
1529        if matchtype.lower().startswith("p"):
1530            matchtype = "pattern"
1531        elif matchtype.lower().startswith("r"):
1532            matchtype = "regex"
1533        else:
1534            raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
1535        sel = selector()
1536        if isinstance(match, selector):
1537            sel = match
1538        else:
1539            sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
1540        self.set_selection(basesel+sel)
1541        self._setsourcetype(stype)
1542        self.set_selection(basesel)
1543        self._add_history("set_sourcetype", varlist)
1544
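    # Hedged usage sketch for set_sourcetype (not part of the original
    # source); 'scan' is an assumed scantable whose reference scans carry a
    # trailing '_R' in SRCNAME. A UNIX-style pattern tags them as references;
    # a regular expression works the same way with matchtype='regex':
    #
    #   scan.set_sourcetype("*_R", matchtype="pattern", sourcetype="reference")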
1545    @print_log_dec
1546    def auto_quotient(self, preserve=True, mode='paired'):
1547        """
1548        This function builds quotients automatically.
1549        It assumes the observation to have the same number of
1550        "ons" and "offs".
1551        Parameters:
1552            preserve:       you can preserve (default) the continuum or
1553                            remove it.  The equations used are
1554                            preserve: Output = Toff * (on/off) - Toff
1555                            remove:   Output = Toff * (on/off) - Ton
1556            mode:           the on/off detection mode
1557                            'paired' (default)
1558                            identifies 'off' scans by the
1559                            trailing '_R' (Mopra/Parkes) or
1560                            '_e'/'_w' (Tid) and matches
1561                            on/off pairs from the observing pattern
1562                            'time'
1563                            finds the closest off in time
1564
1565        """
1566        modes = ["time", "paired"]
1567        if not mode in modes:
1568            msg = "please provide valid mode. Valid modes are %s" % (modes)
1569            raise ValueError(msg)
1570        varlist = vars()
1571        s = None
1572        if mode.lower() == "paired":
1573            basesel = self.get_selection()
1574            sel = selector()+basesel
1575            sel.set_query("SRCTYPE==1")
1576            self.set_selection(sel)
1577            offs = self.copy()
1578            sel.set_query("SRCTYPE==0")
1579            self.set_selection(sel)
1580            ons = self.copy()
1581            s = scantable(self._math._quotient(ons, offs, preserve))
1582            self.set_selection(basesel)
1583        elif mode.lower() == "time":
1584            s = scantable(self._math._auto_quotient(self, mode, preserve))
1585        s._add_history("auto_quotient", varlist)
1586        return s
1587
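    # Hedged usage sketch for auto_quotient (not part of the original source);
    # 'scan' is an assumed scantable containing matching on/off scans:
    #
    #   q1 = scan.auto_quotient()                  # 'paired' on/off matching
    #   q2 = scan.auto_quotient(preserve=False, mode='time')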
1588    @print_log_dec
1589    def mx_quotient(self, mask = None, weight='median', preserve=True):
1590        """
1591        Form a quotient using "off" beams when observing in "MX" mode.
1592        Parameters:
1593            mask:           an optional mask to be used when weight == 'stddev'
1594            weight:         How to average the off beams.  Default is 'median'.
1595            preserve:       you can preserve (default) the continuum or
1596                            remove it.  The equations used are
1597                            preserve: Output = Toff * (on/off) - Toff
1598                            remove:   Output = Toff * (on/off) - Ton
1599        """
1600        mask = mask or ()
1601        varlist = vars()
1602        on = scantable(self._math._mx_extract(self, 'on'))
1603        preoff = scantable(self._math._mx_extract(self, 'off'))
1604        off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1605        from asapmath  import quotient
1606        q = quotient(on, off, preserve)
1607        q._add_history("mx_quotient", varlist)
1608        return q
1609
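    # Hedged usage sketch for mx_quotient (not part of the original source);
    # 'mxscan' is an assumed scantable observed in MX (beam-switching) mode.
    # The off beams are averaged with the chosen weight before the quotient
    # is formed against the on beam:
    #
    #   q = mxscan.mx_quotient(weight='median', preserve=True)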
1610    @print_log_dec
1611    def freq_switch(self, insitu=None):
1612        """
1613        Apply frequency switching to the data.
1614        Parameters:
1615            insitu:      if False a new scantable is returned.
1616                         Otherwise, the switching is done in-situ
1617                         The default is taken from .asaprc (False)
1618        Example:
1619            none
1620        """
1621        if insitu is None: insitu = rcParams['insitu']
1622        self._math._setinsitu(insitu)
1623        varlist = vars()
1624        s = scantable(self._math._freqswitch(self))
1625        s._add_history("freq_switch", varlist)
1626        if insitu: self._assign(s)
1627        else: return s
1628
1629    @print_log_dec
1630    def recalc_azel(self):
1631        """
1632        Recalculate the azimuth and elevation for each position.
1633        Parameters:
1634            none
1635        Example:
1636        """
1637        varlist = vars()
1638        self._recalcazel()
1639        self._add_history("recalc_azel", varlist)
1640        return
1641
1642    @print_log_dec
1643    def __add__(self, other):
1644        varlist = vars()
1645        s = None
1646        if isinstance(other, scantable):
1647            s = scantable(self._math._binaryop(self, other, "ADD"))
1648        elif isinstance(other, float):
1649            s = scantable(self._math._unaryop(self, other, "ADD", False))
1650        else:
1651            raise TypeError("Other input is not a scantable or float value")
1652        s._add_history("operator +", varlist)
1653        return s
1654
1655    @print_log_dec
1656    def __sub__(self, other):
1657        """
1658        implicit on all axes and on Tsys
1659        """
1660        varlist = vars()
1661        s = None
1662        if isinstance(other, scantable):
1663            s = scantable(self._math._binaryop(self, other, "SUB"))
1664        elif isinstance(other, float):
1665            s = scantable(self._math._unaryop(self, other, "SUB", False))
1666        else:
1667            raise TypeError("Other input is not a scantable or float value")
1668        s._add_history("operator -", varlist)
1669        return s
1670
1671    @print_log_dec
1672    def __mul__(self, other):
1673        """
1674        implicit on all axes and on Tsys
1675        """
1676        varlist = vars()
1677        s = None
1678        if isinstance(other, scantable):
1679            s = scantable(self._math._binaryop(self, other, "MUL"))
1680        elif isinstance(other, float):
1681            s = scantable(self._math._unaryop(self, other, "MUL", False))
1682        else:
1683            raise TypeError("Other input is not a scantable or float value")
1684        s._add_history("operator *", varlist)
1685        return s
1686
1687
1688    @print_log_dec
1689    def __div__(self, other):
1690        """
1691        implicit on all axes and on Tsys
1692        """
1693        varlist = vars()
1694        s = None
1695        if isinstance(other, scantable):
1696            s = scantable(self._math._binaryop(self, other, "DIV"))
1697        elif isinstance(other, float):
1698            if other == 0.0:
1699                raise ZeroDivisionError("Dividing by zero is not recommended")
1700            s = scantable(self._math._unaryop(self, other, "DIV", False))
1701        else:
1702            raise TypeError("Other input is not a scantable or float value")
1703        s._add_history("operator /", varlist)
1704        return s
1705
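    # Hedged sketch of the arithmetic operators above (not part of the
    # original source); 'on' and 'off' are assumed scantables of matching
    # shape. scantable-scantable operations go through _binaryop,
    # scantable-float through _unaryop, and any other operand type (e.g. a
    # plain int) raises the TypeError above:
    #
    #   diff = on - off         # channel-by-channel subtraction
    #   scaled = diff * 2.0     # multiply every spectrum by a float
    #   ratio = on / off        # division; the float 0.0 raises ZeroDivisionError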
1706    def get_fit(self, row=0):
1707        """
1708        Print or return the stored fits for a row in the scantable
1709        Parameters:
1710            row:    the row which the fit has been applied to.
1711        """
1712        if row >= self.nrow():
1713            return
1714        from asap.asapfit import asapfit
1715        fit = asapfit(self._getfit(row))
1716        if rcParams['verbose']:
1717            print fit
1718            return
1719        else:
1720            return fit.as_dict()
1721
1722    def flag_nans(self):
1723        """
1724        Utility function to flag NaN values in the scantable.
1725        """
1726        import numpy
1727        basesel = self.get_selection()
1728        for i in range(self.nrow()):
1729            sel = self.get_row_selector(i)
1730            self.set_selection(basesel+sel)
1731            nans = numpy.isnan(self._getspectrum(0))
1732            if numpy.any(nans):
1733                bnans = [ bool(v) for v in nans]
1734                self.flag(bnans)
1735        self.set_selection(basesel)
1736
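    # Hedged usage sketch for flag_nans (not part of the original source);
    # 'scan' is an assumed scantable. Each row is selected in turn via
    # get_row_selector and any NaN channels in it are flagged:
    #
    #   scan.flag_nans()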
1737    def get_row_selector(self, rowno):
1738        return selector(beams=self.getbeam(rowno),
1739                        ifs=self.getif(rowno),
1740                        pols=self.getpol(rowno),
1741                        scans=self.getscan(rowno),
1742                        cycles=self.getcycle(rowno))
1743
1744    def _add_history(self, funcname, parameters):
1745        if not rcParams['scantable.history']:
1746            return
1747        # create date
1748        sep = "##"
1749        from datetime import datetime
1750        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1751        hist = dstr+sep
1752        hist += funcname+sep
1753        if parameters.has_key('self'): del parameters['self']
1754        for k, v in parameters.iteritems():
1755            if type(v) is dict:
1756                for k2, v2 in v.iteritems():
1757                    hist += k2
1758                    hist += "="
1759                    if isinstance(v2, scantable):
1760                        hist += 'scantable'
1761                    elif k2 == 'mask':
1762                        if isinstance(v2, list) or isinstance(v2, tuple):
1763                            hist += str(self._zip_mask(v2))
1764                        else:
1765                            hist += str(v2)
1766                    else:
1767                        hist += str(v2)
1768            else:
1769                hist += k
1770                hist += "="
1771                if isinstance(v, scantable):
1772                    hist += 'scantable'
1773                elif k == 'mask':
1774                    if isinstance(v, list) or isinstance(v, tuple):
1775                        hist += str(self._zip_mask(v))
1776                    else:
1777                        hist += str(v)
1778                else:
1779                    hist += str(v)
1780            hist += sep
1781        hist = hist[:-2] # remove trailing '##'
1782        self._addhistory(hist)
1783
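    # Illustrative note (not part of the original source): with the '##'
    # separator used above, a stored history entry has roughly this shape,
    # the mask having been compressed by _zip_mask into [start, end)
    # channel segments:
    #
    #   2009/05/01 12:34:56##poly_baseline##order=3##mask=[[0, 100], [900, 1024]]##insitu=True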
1784
1785    def _zip_mask(self, mask):
1786        mask = list(mask)
1787        i = 0
1788        segments = []
1789        while mask[i:].count(1):
1790            i += mask[i:].index(1)
1791            if mask[i:].count(0):
1792                j = i + mask[i:].index(0)
1793            else:
1794                j = len(mask)
1795            segments.append([i, j])
1796            i = j
1797        return segments
1798
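    # Illustrative sketch of _zip_mask (not part of the original source): it
    # run-length encodes the True stretches of a channel mask as half-open
    # [start, end) index pairs, e.g.
    #
    #   self._zip_mask([False, True, True, False, True])  ->  [[1, 3], [4, 5]]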
1799    def _get_ordinate_label(self):
1800        fu = "("+self.get_fluxunit()+")"
1801        import re
1802        lbl = "Intensity"
1803        if re.match(".K.", fu):
1804            lbl = "Brightness Temperature "+ fu
1805        elif re.match(".Jy.", fu):
1806            lbl = "Flux density "+ fu
1807        return lbl
1808
1809    def _check_ifs(self):
1810        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1811        nchans = filter(lambda t: t > 0, nchans)
1812        return (max(nchans) == min(nchans))
1813
1814    def _fill(self, names, unit, average):
1815        import os
1816        from asap._asap import stfiller
1817        first = True
1818        fullnames = []
1819        for name in names:
1820            name = os.path.expandvars(name)
1821            name = os.path.expanduser(name)
1822            if not os.path.exists(name):
1823                msg = "File '%s' does not exist" % (name)
1824                if rcParams['verbose']:
1825                    asaplog.push(msg)
1826                    print asaplog.pop().strip()
1827                    return
1828                raise IOError(msg)
1829            fullnames.append(name)
1830        if average:
1831            asaplog.push('Auto averaging integrations')
1832        stype = int(rcParams['scantable.storage'].lower() == 'disk')
1833        for name in fullnames:
1834            tbl = Scantable(stype)
1835            r = stfiller(tbl)
1836            rx = rcParams['scantable.reference']
1837            r._setreferenceexpr(rx)
1838            msg = "Importing %s..." % (name)
1839            asaplog.push(msg, False)
1840            print_log()
1841            r._open(name, -1, -1)
1842            r._read()
1843            if average:
1844                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1845            if not first:
1846                tbl = self._math._merge([self, tbl])
1847            Scantable.__init__(self, tbl)
1848            r._close()
1849            del r, tbl
1850            first = False
1851        if unit is not None:
1852            self.set_fluxunit(unit)
1853        self.set_freqframe(rcParams['scantable.freqframe'])
1854
1855    def __getitem__(self, key):
1856        if key < 0:
1857            key += self.nrow()
1858        if key >= self.nrow():
1859            raise IndexError("Row index out of range.")
1860        return self._getspectrum(key)
1861
1862    def __setitem__(self, key, value):
1863        if key < 0:
1864            key += self.nrow()
1865        if key >= self.nrow():
1866            raise IndexError("Row index out of range.")
1867        if not hasattr(value, "__len__") or \
1868                len(value) != self.nchan(self.getif(key)):
1869            raise ValueError("Spectrum length doesn't match.")
1870        return self._setspectrum(value, key)
1871
1872    def __len__(self):
1873        return self.nrow()
1874
1875    def __iter__(self):
1876        for i in range(len(self)):
1877            yield self[i]
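    # Hedged sketch of the sequence protocol above (not part of the original
    # source); 'scan' is an assumed scantable. Indexing returns the spectrum
    # of a row, negative indices count from the end, and iteration yields one
    # spectrum per row:
    #
    #   first = scan[0]
    #   last = scan[-1]
    #   for spectrum in scan:
    #       print len(spectrum)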