source: trunk/python/scantable.py @ 919

Last change on this file since 919 was 919, checked in by mar637, 18 years ago

fixed indentation error left by Maxim's last check-in

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 52.3 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from numarray import ones,zeros
5import sys
import asap._asap
6
7class scantable(Scantable):
8    """
9        The ASAP container for scans
10    """
11
12    def __init__(self, filename, average=None, unit=None):
13        """
14        Create a scantable from a saved one or make a reference
15        Parameters:
16            filename:    the name of an asap table on disk
17                         or
18                         the name of a rpfits/sdfits/ms file
19                         (integrations within scans are auto averaged
20                         and the whole file is read)
21                         or
22                         [advanced] a reference to an existing
23                         scantable
24            average:     average all integrations within a scan on read.
25                         The default (True) is taken from .asaprc.
26            unit:         brightness unit; must be consistent with K or Jy.
27                         Over-rides the default selected by the reader
28                         (input rpfits/sdfits/ms) or replaces the value
29                         in existing scantables
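        Example (a minimal sketch; 'myobs.rpf' is a placeholder file name):
            scan = scantable('myobs.rpf', average=True, unit='K')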
30        """
31        if average is None or type(average) is not bool:
32            average = rcParams['scantable.autoaverage']
33
34        varlist = vars()
35        from asap._asap import stmath
36        self._math = stmath()
37        from asap import asaplog
38        if isinstance(filename, Scantable):
39            Scantable.__init__(self, filename)
40            if unit is not None:
41                self.set_fluxunit(unit)
42        else:
43            import os.path
44            if not os.path.exists(filename):
45                s = "File '%s' not found." % (filename)
46                if rcParams['verbose']:
47                    asaplog.push(s)
48                    print asaplog.pop().strip()
49                    return
50                raise IOError(s)
51            filename = os.path.expandvars(filename)
52            if os.path.isdir(filename):
53                # crude check if asap table
54                if os.path.exists(filename+'/table.info'):
55                    Scantable.__init__(self, filename, "memory")
56                    if unit is not None:
57                        self.set_fluxunit(unit)
58                else:
59                    msg = "The given file '%s' is not a valid asap table." % (filename)
60                    if rcParams['verbose']:
61                        print msg
62                        return
63                    else:
64                        raise IOError(msg)
65            else:
66                from asap._asap import stfiller
67                ifSel = -1
68                beamSel = -1
69                r = stfiller()
70                r._open(filename,ifSel,beamSel)
71                asaplog.push('Importing data...')
72                print_log()
73                r._read()
74                tbl = r._getdata()
75                if unit is not None:
76                    tbl.set_fluxunit(unit)
77                if average:
78                    asaplog.push('Auto averaging integrations...')
79                    print_log()
80                    tbl2 = self._math._average((tbl,),(),'NONE','SCAN', False)
81                    Scantable.__init__(self,tbl2)
82                    del tbl2
83                else:
84                    Scantable.__init__(self,tbl)
85                del r,tbl
86                self._add_history("scantable", varlist)
87        print_log()
88
89    def save(self, name=None, format=None, overwrite=False):
90        """
91        Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
92        Image FITS or MS2 format.
93        Parameters:
94            name:        the name of the output file. For format="FITS" this
95                         is the directory name into which all the files
96                         will be written (default is 'asap_FITS'). For format
97                         "ASCII" this is the root file name (data in 'name'.txt
98                         and header in 'name'_header.txt)
99            format:      an optional file format. Default is ASAP.
100                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
101                                       'SDFITS' (save as SDFITS file)
102                                       'FITS' (saves each row as a FITS Image)
103                                       'ASCII' (saves as ascii text file)
104                                       'MS2' (saves as an aips++
105                                              MeasurementSet V2)
106            overwrite:   whether to overwrite the file if it already exists.
107                         The default False is to return with warning
108                         without writing the output. USE WITH CARE.
109        Example:
110            scan.save('myscan.asap')
111            scan.save('myscan.sdfits','SDFITS')
112        """
113        from os import path
114        if format is None: format = rcParams['scantable.save']
115        suffix = '.'+format.lower()
116        if name is None or name =="":
117            name = 'scantable'+suffix
118            from asap import asaplog
119            msg = "No filename given. Using default name %s..." % name
120            asaplog.push(msg)
121        name = path.expandvars(name)
122        if path.isfile(name) or path.isdir(name):
123            if not overwrite:
124                msg = "File %s exists." % name
125                if rcParams['verbose']:
126                    print msg
127                    return
128                else:
129                    raise IOError(msg)
130        format2 = format.upper()
131        if format2 == 'ASAP':
132            self._save(name)
133        else:
134            print "NYI"
135#             from asap._asap import sdwriter as _sw
136#             w = _sw(format2)
137#             w.write(self, name, stokes)
138
139        print_log()
140        return
141
142    def copy(self):
143        """
144        Return a copy of this scantable.
145        Parameters:
146            none
147        Example:
148            copiedscan = scan.copy()
149        """
150        sd = scantable(Scantable._copy(self))
151        return sd
152
153    def get_scan(self, scanid=None):
154        """
155        Return a specific scan (by scanno) or collection of scans (by
156        source name) in a new scantable.
157        Parameters:
158            scanid:    a (list of) scanno or a source name, unix-style
159                       patterns are accepted for source name matching, e.g.
160                       '*_R' gets all 'ref' scans
161        Example:
162            # get all scans containing the source '323p459'
163            newscan = scan.get_scan('323p459')
164            # get all 'off' scans
165            refscans = scan.get_scan('*_R')
166            # get a subset of scans by scanno (as listed in scan.summary())
167            newscan = scan.get_scan([0,2,7,10])
168        """
169        if scanid is None:
170            if rcParams['verbose']:
171                print "Please specify a scan no or name to retrieve from the scantable"
172                return
173            else:
174                raise RuntimeError("No scan given")
175
176        try:
177            bsel = self._getselection()
178            sel = asap._asap.Selector()
179            if type(scanid) is str:
180                sel._setname(scanid)
181                self._setselection(sel)
182                scopy = self._copy()
183                self._setselection(bsel)
184                return scantable(scopy)
185            elif type(scanid) is int:
186                sel._setscans([scanid])
187                self._setselection(sel)
188                scopy = self._copy()
189                self._setselection(bsel)
190                return scantable(scopy)
191            elif type(scanid) is list:
192                sel._setscans(scanid)
193                self._setselection(sel)
194                scopy = self._copy()
195                self._setselection(bsel)
196                return scantable(scopy)
197            else:
198                msg = "Illegal scanid type, use 'int' or 'list' of ints."
199                if rcParams['verbose']:
200                    print msg
201                else:
202                    raise TypeError(msg)
203        except RuntimeError:
204            if rcParams['verbose']: print "Couldn't find any match."
205            else: raise
206
207    def __str__(self):
208        return Scantable._summary(self,True)
209
210    def summary(self, filename=None, verbose=None):
211        """
212        Print a summary of the contents of this scantable.
213        Parameters:
214            filename:    the name of a file to write the output to
215                         Default - no file output
216            verbose:     print extra info such as the frequency table
217                         The default (False) is taken from .asaprc
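        Example (a minimal sketch; assumes 'scan' is an existing scantable and
        'obs_summary.txt' is a placeholder file name):
            scan.summary()                            # print/page to screen
            scan.summary(filename='obs_summary.txt')  # also write to a file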
218        """
219        if verbose is None: verbose = rcParams['scantable.verbosesummary']
220        info = Scantable._summary(self, verbose)
221        if filename is not None:
222            if filename == "":
223                filename = 'scantable_summary.txt'
224            from os.path import expandvars, isdir
225            filename = expandvars(filename)
226            if not isdir(filename):
227                data = open(filename, 'w')
228                data.write(info)
229                data.close()
230            else:
231                msg = "Illegal file name '%s'." % (filename)
232                if rcParams['verbose']:
233                    print msg
234                else:
235                    raise IOError(msg)
236        if rcParams['verbose']:
237            try:
238                from IPython.genutils import page as pager
239            except ImportError:
240                from pydoc import pager
241            pager(info)
242        else:
243            return info
244
245    def set_cursor(self, beam=0, IF=0, pol=0):
246        """
247        Set the spectrum for individual operations.
248        Parameters:
249            beam, IF, pol:    a number
250        Example:
251            scan.set_cursor(0,0,1)
252            pol1sig = scan.stats(all=False) # returns std dev for beam=0
253                                            # if=0, pol=1
254        """
255        print "DEPRECATED"
256        varlist = vars()
257        sel = asap._asap.Selector()
258        sel._setbeams([beam])
259        sel._setpols([pol])
260        sel._setifs([IF])
261        self._add_history("set_cursor", varlist)
262        return
263
264    def get_cursor(self):
265        """
266        Return/print the current 'cursor' into the Beam/IF/Pol cube.
267        Parameters:
268            none
269        Returns:
270            a list of values (currentBeam,currentIF,currentPol)
271        Example:
272            none
273        """
274        print "DEPRECATED"
275        sel = self._getselection()
276        i = sel.getbeams()[0]
277        j = sel.getifs()[0]
278        k = sel.getpols()[0]
279        from asap import asaplog
280        out = "--------------------------------------------------\n"
281        out += " Cursor position\n"
282        out += "--------------------------------------------------\n"
283        out += 'Beam=%d IF=%d Pol=%d ' % (i,j,k)
284        asaplog.push(out)
285        print_log()
286        return i,j,k
287
288    def stats(self, stat='stddev', mask=None):
289        """
290        Determine the specified statistic of the current beam/if/pol
291        Takes a 'mask' as an optional parameter to specify which
292        channels should be excluded.
293        Parameters:
294            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
295                     'var', 'stddev', 'avdev', 'rms', 'median'
296            mask:    an optional mask specifying where the statistic
297                     should be determined.
298        Example:
299            scan.set_unit('channel')
300            msk = scan.create_mask([100,200],[500,600])
301            scan.stats(stat='mean', mask=msk)
302        """
303        from numarray import array,zeros,Float
304        if mask is None:
305            mask = []
306        axes = ['Beam','IF','Pol','Time']
307        if not self._check_ifs():
308            raise ValueError("Cannot apply mask as the IFs have different number of channels.\n"
309                             "Please use setselection() to select individual IFs.")
310
311        statvals = self._math._stats(self, mask, stat)
312        out = ''
313        axes = []
314        for i in range(self.nrow()):
315            axis = []
316            axis.append(self.getscan(i))
317            axis.append(self.getbeam(i))
318            axis.append(self.getif(i))
319            axis.append(self.getpol(i))
320            axis.append(self.getcycle(i))
321            axes.append(axis)
322            tm = self._gettime(i)
323            src = self._getsourcename(i)
324            out += 'Scan[%d] (%s) ' % (axis[0], src)
325            out += 'Time[%s]:\n' % (tm)
326            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
327            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
328            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
329            out += '= %3.3f\n' % (statvals[i])
330            out +=  "--------------------------------------------------\n"
331
332        if rcParams['verbose']:
333            print "--------------------------------------------------"
334            print " ",stat
335            print "--------------------------------------------------"
336            print out
337        retval = { 'axesnames': ['scanno','beamno','ifno','polno','cycleno'],
338                   'axes' : axes,
339                   'data': statvals}
340        return retval
341
342    def stddev(self,mask=None):
343        """
344        Determine the standard deviation of the current beam/if/pol
345        Takes a 'mask' as an optional parameter to specify which
346        channels should be excluded.
347        Parameters:
348            mask:    an optional mask specifying where the standard
349                     deviation should be determined.
350
351        Example:
352            scan.set_unit('channel')
353            msk = scan.create_mask([100,200],[500,600])
354            scan.stddev(mask=msk)
355        """
356        return self.stats(stat='stddev', mask=mask)
357
358    def get_tsys(self):
359        """
360        Return the System temperatures.
361        Parameters:
362
363        Returns:
364            a list of Tsys values for the current selection
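        Example (a minimal sketch; assumes 'scan' is an existing scantable):
            tsys = scan.get_tsys()
            print tsys['data']        # one Tsys value per row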
365        """
366
367        return self._row_callback(self._gettsys, "Tsys")
368
369    def _row_callback(self, callback, label):
370        axes = []
371        axesnames = ['scanno','beamno','ifno','polno','cycleno']
372        out = ""
373        outvec =[]
374        for i in range(self.nrow()):
375            axis = []
376            axis.append(self.getscan(i))
377            axis.append(self.getbeam(i))
378            axis.append(self.getif(i))
379            axis.append(self.getpol(i))
380            axis.append(self.getcycle(i))
381            axes.append(axis)
382            tm = self._gettime(i)
383            src = self._getsourcename(i)
384            out += 'Scan[%d] (%s) ' % (axis[0], src)
385            out += 'Time[%s]:\n' % (tm)
386            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
387            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
388            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
389            outvec.append(callback(i))
390            out += '= %3.3f\n' % (outvec[i])
391            out +=  "--------------------------------------------------\n"
392        if rcParams['verbose']:
393            print "--------------------------------------------------"
394            print " %s" % (label)
395            print "--------------------------------------------------"
396            print out
397        retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
398        return retval
399
400
401    def get_time(self, row=-1):
402        """
403        Get a list of time stamps for the observations.
404        Return a string for each integration in the scantable.
405        Parameters:
406            row:    row no of integration. Default -1 return all rows
407        Example:
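            # a minimal sketch; assumes 'scan' is an existing scantable
            times = scan.get_time()      # time stamps for all rows
            t0 = scan.get_time(0)        # time stamp of the first row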
409        """
410        out = []
411        if row == -1:
412            for i in range(self.nrow()):
413                out.append(self._gettime(i))
414            return out
415        else:
416            if 0 <= row < self.nrow():
417                return self._gettime(row)
418
419    def get_sourcename(self, row=-1):
420        """
421        Get a list of source names for the observations.
422        Return a string for each integration in the scantable.
423        Parameters:
424            row:    row no of integration. Default -1 return all rows
425        Example:
426            none
427        """
428        out = []
429        if row == -1:
430            return [self._getsourcename(i) for i in range(self.nrow())]
431        else:
432            if  0 <= row < self.nrow():
433                return self._getsourcename(row)
434
435    def get_elevation(self, row=-1):
436        """
437        Get a list of elevations for the observations.
438        Return a float for each integration in the scantable.
439        Parameters:
440            row:    row no of integration. Default -1 return all rows
441        Example:
442            none
443        """
444        out = []
445        if row == -1:
446            return [self._getelevation(i) for i in range(self.nrow())]
447        else:
448            if  0 <= row < self.nrow():
449                return self._getelevation(row)
450
451    def get_azimuth(self, row=-1):
452        """
453        Get a list of azimuths for the observations.
454        Return a float for each integration in the scantable.
455        Parameters:
456            row:    row no of integration. Default -1 return all rows
457        Example:
458            none
459        """
460        out = []
461        if row == -1:
462            return [self._getazimuth(i) for i in range(self.nrow())]
463        else:
464            if  0 <= row < self.nrow():
465                return self._getazimuth(row)
466
467    def get_parangle(self, row=-1):
468        """
469        Get a list of parallactic angles for the observations.
470        Return a float for each integration in the scantable.
471        Parameters:
472            row:    row no of integration. Default -1 return all rows
473        Example:
474            none
475        """
476        out = []
477        if row == -1:
478            return [self._getparangle(i) for i in range(self.nrow())]
479        else:
480            if  0 <= row < self.nrow():
481                return self._getparangle(row)
482
483    def set_unit(self, unit='channel'):
484        """
485        Set the unit for all following operations on this scantable
486        Parameters:
487            unit:    optional unit, default is 'channel'
488                     one of '*Hz','km/s','channel', ''
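        Example (assuming 'scan' is an existing scantable):
            scan.set_unit('km/s')    # work in velocity from now on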
489        """
490        varlist = vars()
491        if unit in ['','pixel', 'channel']:
492            unit = ''
493        inf = list(self._getcoordinfo())
494        inf[0] = unit
495        self._setcoordinfo(inf)
496        self._add_history("set_unit",varlist)
497
498    def set_instrument(self, instr):
499        """
500        Set the instrument for subsequent processing
501        Parameters:
502            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
503                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
504        """
505        self._setInstrument(instr)
506        self._add_history("set_instrument", vars())
507        print_log()
508
509    def set_doppler(self, doppler='RADIO'):
510        """
511        Set the doppler for all following operations on this scantable.
512        Parameters:
513            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
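        Example (a minimal sketch):
            scan.set_doppler('OPTICAL')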
514        """
515        varlist = vars()
516        inf = list(self._getcoordinfo())
517        inf[2] = doppler
518        self._setcoordinfo(inf)
519        self._add_history("set_doppler",vars())
520        print_log()
521
522    def set_freqframe(self, frame=None):
523        """
524        Set the frame type of the Spectral Axis.
525        Parameters:
526            frame:   an optional frame type, default 'LSRK'. Valid frames are:
527                     'REST','TOPO','LSRD','LSRK','BARY',
528                     'GEO','GALACTO','LGROUP','CMB'
529        Examples:
530            scan.set_freqframe('BARY')
531        """
532        if frame is None: frame = rcParams['scantable.freqframe']
533        varlist = vars()
534        valid = ['REST','TOPO','LSRD','LSRK','BARY', \
535                   'GEO','GALACTO','LGROUP','CMB']
536
537        if frame in valid:
538            inf = list(self._getcoordinfo())
539            inf[1] = frame
540            self._setcoordinfo(inf)
541            self._add_history("set_freqframe",varlist)
542        else:
543            msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid)
544            if rcParams['verbose']:
545                print msg
546            else:
547                raise TypeError(msg)
548        print_log()
549
550    def get_unit(self):
551        """
552        Get the default unit set in this scantable
553        Parameters:
554        Returns:
555            A unit string
556        """
557        inf = self._getcoordinfo()
558        unit = inf[0]
559        if unit == '': unit = 'channel'
560        return unit
561
562    def get_abcissa(self, rowno=0):
563        """
564        Get the abcissa in the current coordinate setup for the currently
565        selected Beam/IF/Pol
566        Parameters:
567            rowno:    an optional row number in the scantable. Default is the
568                      first row, i.e. rowno=0
569        Returns:
570            The abcissa values and their format string (returned as a tuple)
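        Example (a minimal sketch; assumes 'scan' is an existing scantable):
            abc, lbl = scan.get_abcissa(rowno=0)
            print lbl                # the axis label for the current unit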
571        """
572        abc = self._getabcissa(rowno)
573        lbl = self._getabcissalabel(rowno)
574        print_log()
575        return abc, lbl
576
577    def create_mask(self, *args, **kwargs):
578        """
579        Compute and return a mask based on [min,max] windows.
580        The specified windows are to be INCLUDED, when the mask is
581        applied.
582        Parameters:
583            [min,max],[min2,max2],...
584                Pairs of start/end points specifying the regions
585                to be masked
586            invert:     optional argument. If specified as True,
587                        return an inverted mask, i.e. the regions
588                        specified are EXCLUDED
589            row:        create the mask using the specified row for
590                        unit conversions, default is row=0
591                        only necessary if frequency varies over rows.
592        Example:
593            scan.set_unit('channel')
594
595            a)
596            msk = scan.create_mask([400,500],[800,900])
597            # includes (mask=1) only the channels between 400-500
598            # and 800-900 in the unit 'channel'
599
600            b)
601            msk = scan.create_mask([400,500],[800,900], invert=True)
602            # excludes (mask=0) the regions between 400 and 500
603            # and 800 and 900 in the unit 'channel'
604
605        """
606        row = 0
607        if kwargs.has_key("row"):
608            row = kwargs.get("row")
609        data = self._getabcissa(row)
610        u = self._getcoordinfo()[0]
611        if rcParams['verbose']:
612            if u == "": u = "channel"
613            from asap import asaplog
614            msg = "The current mask window unit is %s" % u
615            if not self._check_ifs():
616                msg += "\nThis mask is only valid for IF=%d" % (self.getif(row))
617            asaplog.push(msg)
618        n = self.nchan()
619        msk = zeros(n)
620        # test if args is a 'list' or a 'normal *args - UGLY!!!
621
622        ws = (isinstance(args[-1][-1],int) or isinstance(args[-1][-1],float)) and args or args[0]
623        for window in ws:
624            if (len(window) != 2 or window[0] > window[1] ):
625                raise TypeError("A window needs to be defined as [min,max]")
626            for i in range(n):
627                if data[i] >= window[0] and data[i] < window[1]:
628                    msk[i] = 1
629        if kwargs.has_key('invert'):
630            if kwargs.get('invert'):
631                from numarray import logical_not
632                msk = logical_not(msk)
633        print_log()
634        return msk
635
636    def get_restfreqs(self):
637        """
638        Get the restfrequency(s) stored in this scantable.
639        The return value(s) are always of unit 'Hz'
640        Parameters:
641            none
642        Returns:
643            a list of doubles
644        """
645        return list(self._getrestfreqs())
646
647
648#     def set_restfreqs(self, freqs=None, unit='Hz', lines=None, source=None,
649#                       theif=None):
650#         """
651#         Select the restfrequency for the specified source and IF OR
652#         replace for all IFs.  If the 'freqs' argument holds a scalar,
653#         then that rest frequency will be applied to the selected
654#         data (and added to the list of available rest frequencies).
655#         In this way, you can set a rest frequency for each
656#         source and IF combination.   If the 'freqs' argument holds
657#         a vector, then it MUST be of length the number of IFs
658#         (and the available restfrequencies will be replaced by
659#         this vector).  In this case, *all* data ('source' and
660#         'theif' are ignored) have the restfrequency set per IF according
661#         to the corresponding value you give in the 'freqs' vector.
662#         E.g. 'freqs=[1e9,2e9]'  would mean IF 0 gets restfreq 1e9 and
663#         IF 1 gets restfreq 2e9.
664#
665#         You can also specify the frequencies via known line names
666#         in the argument 'lines'.  Use 'freqs' or 'lines'.  'freqs'
667#         takes precedence. See the list of known names via function
668#         scantable.lines()
669#         Parameters:
670#             freqs:   list of rest frequencies
671#             unit:    unit for rest frequency (default 'Hz')
672#             lines:   list of known spectral lines names (alternative to freqs).
673#                      See possible list via scantable.lines()
674#             source:  Source name (blank means all)
675#             theif:   IF (-1 means all)
676#         Example:
677#             scan.set_restfreqs(freqs=1.4e9, source='NGC253', theif=2)
678#             scan.set_restfreqs(freqs=[1.4e9,1.67e9])
679#         """
680#         varlist = vars()
681#         if source is None:
682#             source = ""
683#         if theif is None:
684#             theif = -1
685#         t = type(freqs)
686#         if t is int or t is float:
687#            freqs = [freqs]
688#         if freqs is None:
689#            freqs = []
690#         t = type(lines)
691#         if t is str:
692#            lines = [lines]
693#         if lines is None:
694#            lines = []
695#         self._setrestfreqs(freqs, unit, lines, source, theif)
696#         self._add_history("set_restfreqs", varlist)
697#
698
699
700    def history(self):
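        """
        Print the full processing history of this scantable
        (paged via IPython or pydoc where available).
        """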
701        hist = list(self._gethistory())
702        out = "-"*80
703        for h in hist:
704            if h.startswith("---"):
705                out += "\n"+h
706            else:
707                items = h.split("##")
708                date = items[0]
709                func = items[1]
710                items = items[2:]
711                out += "\n"+date+"\n"
712                out += "Function: %s\n  Parameters:" % (func)
713                for i in items:
714                    s = i.split("=")
715                    out += "\n   %s = %s" % (s[0],s[1])
716                out += "\n"+"-"*80
717        try:
718            from IPython.genutils import page as pager
719        except ImportError:
720            from pydoc import pager
721        pager(out)
722        return
723
724    #
725    # Maths business
726    #
727
728    def average_time(self, mask=None, scanav=False, weight='tint'):
729        """
730        Return the (time) average of a scan, or apply it 'insitu'.
731        Note:
732            in channels only
733            The cursor of the output scan is set to 0.
734        Parameters:
736            mask:     an optional mask (only used for 'var' and 'tsys'
737                      weighting)
738            scanav:   True averages each scan separately
739                      False (default) averages all scans together,
740            weight:   Weighting scheme. 'none', 'var' (1/var(spec)
741                      weighted), 'tsys' (1/Tsys**2 weighted), 'tint'
742                      (integration time weighted) or 'tintsys' (Tint/Tsys**2).
743                      The default is 'tint'
744        Example:
745            # time average the scantable without using a mask
746            newscan = scan.average_time()
747        """
748        varlist = vars()
749        if weight is None: weight = 'tint'
750        if mask is None: mask = ()
751        if scanav:
752            scanav = "SCAN"
753        else:
754            scanav = "NONE"
755        s = scantable(self._math._average((self,), mask, weight, scanav, False))
756        s._add_history("average_time",varlist)
757        print_log()
758        return s
759
760    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
761        """
762        Return a scan where all spectra are converted to either
763        Jansky or Kelvin depending upon the flux units of the scan table.
764        By default the function tries to look the values up internally.
765        If it can't find them (or if you want to over-ride), you must
766        specify EITHER jyperk OR eta (and D which it will try to look up
767        also if you don't set it). jyperk takes precedence if you set both.
768        Parameters:
769            jyperk:      the Jy / K conversion factor
770            eta:         the aperture efficiency
771            d:           the geometric diameter (metres)
772            insitu:      if False a new scantable is returned.
773                         Otherwise, the scaling is done in-situ
774                         The default is taken from .asaprc (False)
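        Example (a sketch; the efficiency and diameter are placeholder values):
            scan2 = scan.convert_flux(eta=0.58, d=22.0, insitu=False)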
778        """
779        if insitu is None: insitu = rcParams['insitu']
780        self._math._setinsitu(insitu)
781        varlist = vars()
782        if jyperk is None: jyperk = -1.0
783        if d is None: d = -1.0
784        if eta is None: eta = -1.0
785        s = scantable(self._math._convertflux(self, d, eta, jyperk))
786        s._add_history("convert_flux", varlist)
787        print_log()
788        if insitu: self._assign(s)
789        else: return s
790
791    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
792        """
793        Return a scan after applying a gain-elevation correction.
794        The correction can be made via either a polynomial or a
795        table-based interpolation (and extrapolation if necessary).
796        You specify polynomial coefficients, an ascii table or neither.
797        If you specify neither, then a polynomial correction will be made
798        with built in coefficients known for certain telescopes (an error
799        will occur if the instrument is not known).
800        The data and Tsys are *divided* by the scaling factors.
801        Parameters:
802            poly:        Polynomial coefficients (default None) to compute a
803                         gain-elevation correction as a function of
804                         elevation (in degrees).
805            filename:    The name of an ascii file holding correction factors.
806                         The first row of the ascii file must give the column
807                         names and these MUST include columns
808                         "ELEVATION" (degrees) and "FACTOR" (multiply data
809                         by this) somewhere.
810                         The second row must give the data type of the
811                         column. Use 'R' for Real and 'I' for Integer.
812                         An example file would be
813                         (actual factors are arbitrary) :
814
815                         TIME ELEVATION FACTOR
816                         R R R
817                         0.1 0 0.8
818                         0.2 20 0.85
819                         0.3 40 0.9
820                         0.4 60 0.85
821                         0.5 80 0.8
822                         0.6 90 0.75
823            method:      Interpolation method when correcting from a table.
824                         Values are  "nearest", "linear" (default), "cubic"
825                         and "spline"
826            insitu:      if False a new scantable is returned.
827                         Otherwise, the scaling is done in-situ
828                         The default is taken from .asaprc (False)
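        Example (a sketch; 'gainel.txt' is a placeholder correction table):
            scan2 = scan.gain_el(filename='gainel.txt', method='linear',
                                 insitu=False)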
829        """
830
831        if insitu is None: insitu = rcParams['insitu']
832        self._math._setinsitu(insitu)
833        varlist = vars()
834        if poly is None:
835           poly = ()
836        from os.path import expandvars
837        filename = expandvars(filename)
838        s = scantable(self._math._gainel(self, poly, filename, method))
839        s._add_history("gain_el", varlist)
840        print_log()
841        if insitu: self._assign(s)
842        else: return s
843
844    def freq_align(self, reftime=None, method='cubic', perif=False,
845                   insitu=None):
846        """
847        Return a scan where all rows have been aligned in frequency/velocity.
848        The alignment frequency frame (e.g. LSRK) is that set by function
849        set_freqframe.
850        Parameters:
851            reftime:     reference time to align at. By default, the time of
852                         the first row of data is used.
853            method:      Interpolation method for regridding the spectra.
854                         Choose from "nearest", "linear", "cubic" (default)
855                         and "spline"
856            perif:       Generate aligners per freqID (no doppler tracking) or
857                         per IF (scan-based doppler tracking)
858            insitu:      if False a new scantable is returned.
859                         Otherwise, the scaling is done in-situ
860                         The default is taken from .asaprc (False)
861        """
862        print "Not yet implemented"
863        return
864        if insitu is None: insitu = rcParams['insitu']
865        self._math._setinsitu(insitu)
866        varlist = vars()
867        if reftime is None: reftime = ''
868        perfreqid = not perif
869        s = scantable(self._math._freqalign(self, reftime, method, perfreqid))
870        s._add_history("freq_align", varlist)
871        print_log()
872        if insitu: self._assign(s)
873        else: return s
874
875    def opacity(self, tau, insitu=None):
876        """
877        Apply an opacity correction. The data
878        and Tsys are multiplied by the correction factor.
879        Parameters:
880            tau:         Opacity from which the correction factor is
881                         exp(tau*ZD)
882                         where ZD is the zenith-distance
883            insitu:      if False a new scantable is returned.
884                         Otherwise, the scaling is done in-situ
885                         The default is taken from .asaprc (False)
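        Example (a sketch; the opacity value is a placeholder):
            scan2 = scan.opacity(tau=0.05, insitu=False)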
886        """
887        if insitu is None: insitu = rcParams['insitu']
888        self._math._setinsitu(insitu)
889        varlist = vars()
890        s = scantable(self._math._opacity(self, tau))
891        s._add_history("opacity", varlist)
892        print_log()
893        if insitu: self._assign(s)
894        else: return s
895
896    def bin(self, width=5, insitu=None):
897        """
898        Return a scan where all spectra have been binned up.
        Parameters:
899            width:       The bin width (default=5) in pixels
900            insitu:      if False a new scantable is returned.
901                         Otherwise, the scaling is done in-situ
902                         The default is taken from .asaprc (False)
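        Example (a sketch):
            binned = scan.bin(width=10, insitu=False)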
903        """
904        if insitu is None: insitu = rcParams['insitu']
905        self._math._setinsitu(insitu)
906        varlist = vars()
907        s = scantable(self._math._bin(self, width))
908        s._add_history("bin",varlist)
909        print_log()
910        if insitu: self._assign(s)
911        else: return s
912
913
914    def resample(self, width=5, method='cubic', insitu=None):
915        """
916        Return a scan where all spectra have been resampled (binned up).
        Parameters:
917            width:       The bin width (default=5) in pixels
918            method:      Interpolation method to use. Values are
919                         "nearest", "linear", "cubic" (default)
920                         and "spline"
921            insitu:      if False a new scantable is returned.
922                         Otherwise, the scaling is done in-situ
923                         The default is taken from .asaprc (False)
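        Example (a sketch):
            rs = scan.resample(width=4, method='spline', insitu=False)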
924        """
925        if insitu is None: insitu = rcParams['insitu']
926        self._math._setinsitu(insitu)
927        varlist = vars()
928        s = scantable(self._math._resample(self, method, width))
929        s._add_history("resample",varlist)
930        print_log()
931        if insitu: self._assign(s)
932        else: return s
933
934
935#     def average_pol(self, mask=None, weight='none', insitu=None):
936#         """
937#         Average the Polarisations together.
938#         The polarisation cursor of the output scan is set to 0
939#         Parameters:
940#             mask:        An optional mask defining the region, where the
941#                          averaging will be applied. The output will have all
942#                          specified points masked.
943#             weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
944#                          weighted), or 'tsys' (1/Tsys**2 weighted)
945#             insitu:      if False a new scantable is returned.
946#                          Otherwise, the scaling is done in-situ
947#                          The default is taken from .asaprc (False)
948#         """
949#         if insitu is None: insitu = rcParams['insitu']
950#         self._math._setinsitu(insitu)
951#         varlist = vars()
952#
953#         if mask is None:
954#         mask = ()
955#         s = self._math._averagepol(self, mask, weight)
956#         s._add_history("average_pol",varlist)
957#         print_log()
958#         if insitu: self._assign(s)
959#         else: return scantable(s)
960
961    def smooth(self, kernel="hanning", width=5.0, insitu=None):
962        """
963        Smooth the spectrum by the specified kernel (conserving flux).
964        Parameters:
966            kernel:     The type of smoothing kernel. Select from
967                        'hanning' (default), 'gaussian' and 'boxcar'.
968                        The first three characters are sufficient.
969            width:      The width of the kernel in pixels (default 5.0).
970                        For 'hanning' this is ignored.
971                        For 'gaussian' it is the Full Width Half
972                        Maximum. For 'boxcar' it is the full width.
973            insitu:     if False a new scantable is returned.
974                        Otherwise, the scaling is done in-situ
975                        The default is taken from .asaprc (False)
976        Example:
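            # a minimal sketch; assumes 'scan' is an existing scantable
            sm = scan.smooth(kernel='boxcar', width=3, insitu=False)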
978        """
979        if insitu is None: insitu = rcParams['insitu']
980        self._math._setinsitu(insitu)
981        varlist = vars()
982        s = scantable(self._math._smooth(self,kernel,width))
983        s._add_history("smooth", varlist)
984        print_log()
985        if insitu: self._assign(s)
986        else: return s
987
988
989    def poly_baseline(self, mask=None, order=0, insitu=None):
990        """
991        Return a scan which has been baselined (all rows) by a polynomial.
992        Parameters:
994            mask:       an optional mask
995            order:      the order of the polynomial (default is 0)
996            insitu:     if False a new scantable is returned.
997                        Otherwise, the scaling is done in-situ
998                        The default is taken from .asaprc (False)
1002        Example:
1003            # return a scan baselined by a third order polynomial,
1004            # not using a mask
1005            bscan = scan.poly_baseline(order=3)
1006        """
1007        if insitu is None: insitu = rcParams['insitu']
1008        varlist = vars()
1009        if mask is None:
1010            from numarray import ones
1011            mask = list(ones(self.nchan(-1)))
1012        from asap.asapfitter import fitter
1013        f = fitter()
1014        f.set_scan(self, mask)
1015        f.set_function(poly=order)
1016        s = f.auto_fit(insitu)
1017        s._add_history("poly_baseline", varlist)
1018        print_log()
1019        if insitu: self._assign(s)
1020        else: return s
1021
1022    def auto_poly_baseline(self, mask=None, edge=(0,0), order=0,
1023                           threshold=3, insitu=None):
1024        """
1025        Return a scan which has been baselined (all rows) by a polynomial.
1026        Spectral lines are detected first using linefinder and masked out
1027        to avoid them affecting the baseline solution.
1028
1029        Parameters:
1030            mask:       an optional mask retrieved from scantable
1031            edge:       an optional number of channels to drop at
1032                        the edge of the spectrum. If only one value is
1033                        specified, the same number will be dropped from
1034                        both sides of the spectrum. Default is to keep
1035                        all channels. Nested tuples represent individual
1036                        edge selection for different IFs (the number of
1037                        spectral channels may differ between IFs)
1038            order:      the order of the polynomial (default is 0)
1039            threshold:  the threshold used by line finder. It is better to
1040                        keep it large as only strong lines affect the
1041                        baseline solution.
1042            insitu:     if False a new scantable is returned.
1043                        Otherwise, the scaling is done in-situ
1044                        The default is taken from .asaprc (False)
1045
1046        Example:
1047            scan2=scan.auto_poly_baseline(order=7)
1048        """
1049        if insitu is None: insitu = rcParams['insitu']
1050        varlist = vars()
1051        from asap.asapfitter import fitter
1052        from asap.asaplinefind import linefinder
1053        from asap import _is_sequence_or_number as _is_valid
1054
1055        # check whether edge is set up for each IF individually
1056        individualEdge = False;
1057        if len(edge)>1:
1058           if isinstance(edge[0],list) or isinstance(edge[0],tuple):
1059               individualEdge = True;
1060
1061        if not _is_valid(edge, int) and not individualEdge:
1062            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1063                              "pair of integers specified as a tuple. Nested tuples " \
1064                              "are allowed to make individual selection for different IFs."
1065
1066        curedge = (0,0)
1067        if individualEdge:
1068           for edge_par in edge:
1069               if not _is_valid(edge_par, int):
1070                  raise ValueError, "Each element of the 'edge' tuple has " \
1071                                    "to be a pair of integers or an integer."
1072        else:
1073           curedge = edge;
1074
1075        # setup fitter
1076        f = fitter()
1077        f.set_function(poly=order)
1078
1079        # setup line finder
1080        fl=linefinder()
1081        fl.set_options(threshold=threshold)
1082
1083        if not insitu:
1084            workscan=self.copy()
1085        else:
1086            workscan=self
1087
1088        fl.set_scan(workscan)
1089
1090        rows=range(workscan.nrow())
1091        from asap import asaplog
1092        asaplog.push("Processing:")
1093        for r in rows:
1094            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
                  (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r),
                   workscan.getpol(r), workscan.getcycle(r))
1095            asaplog.push(msg, False)
1096
1097            # figure out edge parameter
1098            if individualEdge:
1099               if len(edge) <= workscan.getif(r):
1100                  raise RuntimeError, "Number of edge elements appears to be less than the number of IFs"
1101               curedge = edge[workscan.getif(r)]
1102
1103            # setup line finder
1104            fl.find_lines(r,mask,curedge)
1105            f.set_scan(workscan, fl.get_mask())
1106            f.x = workscan._getabcissa(r)
1107            f.y = workscan._getspectrum(r)
1108            f.data = None
1109            f.fit()
1110            x = f.get_parameters()
1111            workscan._setspectrum(f.fitter.getresidual(), r)
1112        workscan._add_history("auto_poly_baseline", varlist)
1113        if insitu:
1114            self._assign(workscan)
1115        else:
1116            return workscan
1117
1118    def rotate_linpolphase(self, angle):
1119        """
1120        Rotate the phase of the complex polarization O=Q+iU correlation.
1121        This is always done in situ in the raw data.  So if you call this
1122        function more than once then each call rotates the phase further.
1123        Parameters:
1124            angle:   The angle (degrees) to rotate (add) by.
1125        Examples:
1126            scan.rotate_linpolphase(2.3)
1127        """
1128        varlist = vars()
1129        self._math._rotate_linpolphase(self, angle)
1130        self._add_history("rotate_linpolphase", varlist)
1131        print_log()
1132        return
1133
1134
1135    def rotate_xyphase(self, angle):
1136        """
1137        Rotate the phase of the XY correlation.  This is always done in situ
1138        in the data.  So if you call this function more than once
1139        then each call rotates the phase further.
1140        Parameters:
1141            angle:   The angle (degrees) to rotate (add) by.
1142        Examples:
1143            scan.rotate_xyphase(2.3)
1144        """
1145        varlist = vars()
1146        self._math._rotate_xyphase(self, angle)
1147        self._add_history("rotate_xyphase", varlist)
1148        print_log()
1149        return
1150
1151    def swap_linears(self):
1152        """
1153        Swap the linear polarisations XX and YY
1154        """
1155        varlist = vars()
1156        self._math._swap_linears(self)
1157        self._add_history("swap_linears", varlist)
1158        print_log()
1159        return
1160
1161    def invert_phase(self):
1162        """
1163        Invert the phase of the complex polarisation
1164        """
1165        varlist = vars()
1166        self._math._invert_phase(self)
1167        self._add_history("invert_phase", varlist)
1168        print_log()
1169        return
1170
1171    def add(self, offset, insitu=None):
1172        """
1173        Return a scan where all spectra have the offset added
1174        Parameters:
1175            offset:      the offset
1176            insitu:      if False a new scantable is returned.
1177                         Otherwise, the scaling is done in-situ
1178                         The default is taken from .asaprc (False)
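        Example (a sketch; the offset is a placeholder value):
            scan2 = scan.add(0.5, insitu=False)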
1179        """
1180        if insitu is None: insitu = rcParams['insitu']
1181        self._math._setinsitu(insitu)
1182        varlist = vars()
1183        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1184        s._add_history("add",varlist)
1185        print_log()
1186        if insitu:
1187            self._assign(s)
1188        else:
1189            return s
1190
1191    def scale(self, factor, tsys=True, insitu=None):
1192        """
1193        Return a scan where all spectra are scaled by the given 'factor'
1194        Parameters:
1195            factor:      the scaling factor
1196            insitu:      if False a new scantable is returned.
1197                         Otherwise, the scaling is done in-situ
1198                         The default is taken from .asaprc (False)
1199            tsys:        if True (default) then apply the operation to Tsys
1200                         as well as the data
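        Example (a sketch; the factor is a placeholder value):
            scan2 = scan.scale(2.0, tsys=False, insitu=False)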
1201        """
1202        if insitu is None: insitu = rcParams['insitu']
1203        self._math._setinsitu(insitu)
1204        varlist = vars()
1205        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1206        s._add_history("scale",varlist)
1207        print_log()
1208        if insitu:
1209            self._assign(s)
1210        else:
1211            return s
1212
1213    def auto_quotient(self, mode='time', preserve=True):
1214        """
1215        This function allows you to build quotients automatically.
1216        It assumes the observation to have the same number of
1217        "ons" and "offs".
1218        It will support "closest off in time" in the future.
1219        Parameters:
1220            mode:           the on/off detection mode; currently only 'time'
1221                            is supported. 'off' scans are identified by the
1222                            trailing '_R' (Mopra/Parkes) or
1223                            '_e'/'_w' (Tid) in the source name
1224            preserve:       you can preserve (default) the continuum or
1225                            remove it.  The equations used are
1226                            preserve: Output = Toff * (on/off) - Toff
1227                            remove:   Output = Toff * (on/off) - Ton
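        Example (a sketch; assumes the scantable contains matching on/off scans):
            q = scan.auto_quotient()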
1228        """
1229        modes = ["time"]
1230        if not mode in modes:
1231            msg = "please provide valid mode. Valid modes are %s" % (modes)
1232            raise ValueError(msg)
1233        varlist = vars()
1234        s = scantable(self._math._quotient(self, mode, preserve))
1235        s._add_history("auto_quotient",varlist)
1236        print_log()
1237        return s
1238
1239
1240
1241
1242    def freq_switch(self, insitu=None):
1243        """
1244        Apply frequency switching to the data.
1245        Parameters:
1246            insitu:      if False a new scantable is returned.
1247                         Otherwise, the switching is done in-situ
1248                         The default is taken from .asaprc (False)
1249        Example:
1250            none
1251        """
1252        if insitu is None: insitu = rcParams['insitu']
1253        self._math._setinsitu(insitu)
1254        varlist = vars()
1255        s = scantable(self._math._freqswitch(self))
1256        s._add_history("freq_switch",varlist)
1257        print_log()
1258        if insitu: self._assign(s)
1259        else: return s
1260
1261    def recalc_azel(self):
1262        """
1263        Recalculate the azimuth and elevation for each position.
1264        Parameters:
1265            none
1266        Example:
1267        """
1268        varlist = vars()
1269        self._recalcazel()
1270        self._add_history("recalc_azel", varlist)
1271        print_log()
1272        return
1273
1274    def __add__(self, other):
1275        varlist = vars()
1276        s = None
1277        if isinstance(other, scantable):
1278            print "scantable + scantable NYI"
1279            return
1280        elif isinstance(other, float):
1281            s = scantable(self._math._unaryop(self, other, "ADD", False))
1282        else:
1283            raise TypeError("Other input is not a scantable or float value")
1284        s._add_history("operator +", varlist)
1285        print_log()
1286        return s
1287
1288    def __sub__(self, other):
1289        """
1290        implicit on all axes and on Tsys
1291        """
1292        varlist = vars()
1293        s = None
1294        if isinstance(other, scantable):
1295            print "scantable - scantable NYI"
1296            return
1297        elif isinstance(other, float):
1298            s = scantable(self._math._unaryop(self, other, "SUB", False))
1299        else:
1300            raise TypeError("Other input is not a scantable or float value")
1301        s._add_history("operator -", varlist)
1302        print_log()
1303        return s
1304
1305    def __mul__(self, other):
1306        """
1307        implicit on all axes and on Tsys
1308        """
1309        varlist = vars()
1310        s = None
1311        if isinstance(other, scantable):
1312            print "scantable * scantable NYI"
1313            return
1314        elif isinstance(other, float):
1315            s = scantable(self._math._unaryop(self, other, "MUL", False))
1316        else:
1317            raise TypeError("Other input is not a scantable or float value")
1318        s._add_history("operator *", varlist)
1319        print_log()
1320        return s
1321
1322
1323    def __div__(self, other):
1324        """
1325        implicit on all axes and on Tsys
1326        """
1327        varlist = vars()
1328        s = None
1329        if isinstance(other, scantable):
1330            print "scantable / scantable NYI"
1331            return
1332        elif isinstance(other, float):
1333            if other == 0.0:
1334                raise ZeroDivisionError("Dividing by zero is not recommended")
1335            s = scantable(self._math._unaryop(self, other, "DIV", False))
1336        else:
1337            raise TypeError("Other input is not a scantable or float value")
1338        s._add_history("operator /", varlist)
1339        print_log()
1340        return s
1341
1342    def get_fit(self, row=0):
1343        """
1344        Print or return the stored fits for a row in the scantable
1345        Parameters:
1346            row:    the row which the fit has been applied to.
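        Example (a sketch; assumes a fit has already been stored for row 0):
            fitdict = scan.get_fit(0)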
1347        """
1348        if row >= self.nrow():
1349            return
1350        from asap import asapfit
1351        fit = asapfit(self._getfit(row))
1352        if rcParams['verbose']:
1353            print fit
1354            return
1355        else:
1356            return fit.as_dict()
1357
1358    def _add_history(self, funcname, parameters):
1359        # create date
1360        sep = "##"
1361        from datetime import datetime
1362        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1363        hist = dstr+sep
1364        hist += funcname+sep
1365        if parameters.has_key('self'): del parameters['self']
1366        for k,v in parameters.iteritems():
1367            if type(v) is dict:
1368                for k2,v2 in v.iteritems():
1369                    hist += k2
1370                    hist += "="
1371                    if isinstance(v2,scantable):
1372                        hist += 'scantable'
1373                    elif k2 == 'mask':
1374                        if isinstance(v2,list) or isinstance(v2,tuple):
1375                            hist += str(self._zip_mask(v2))
1376                        else:
1377                            hist += str(v2)
1378                    else:
1379                        hist += str(v2)
1380            else:
1381                hist += k
1382                hist += "="
1383                if isinstance(v,scantable):
1384                    hist += 'scantable'
1385                elif k == 'mask':
1386                    if isinstance(v,list) or isinstance(v,tuple):
1387                        hist += str(self._zip_mask(v))
1388                    else:
1389                        hist += str(v)
1390                else:
1391                    hist += str(v)
1392            hist += sep
1393        hist = hist[:-2] # remove trailing '##'
1394        self._addhistory(hist)
1395
1396
1397    def _zip_mask(self, mask):
1398        mask = list(mask)
1399        i = 0
1400        segments = []
1401        while mask[i:].count(1):
1402            i += mask[i:].index(1)
1403            if mask[i:].count(0):
1404                j = i + mask[i:].index(0)
1405            else:
1406                j = len(mask)
1407            segments.append([i,j])
1408            i = j
1409        return segments
1410
1411    def _get_ordinate_label(self):
1412        fu = "("+self.get_fluxunit()+")"
1413        import re
1414        lbl = "Intensity"
1415        if re.match(".K.",fu):
1416            lbl = "Brightness Temperature "+ fu
1417        elif re.match(".Jy.",fu):
1418            lbl = "Flux density "+ fu
1419        return lbl
1420
1421    def _check_ifs(self):
1422        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1423        nchans = filter(lambda t: t > 0, nchans)
1424        return (sum(nchans)/len(nchans) == nchans[0])