source: trunk/python/scantable.py @ 976

Last change on this file since 976 was 976, checked in by mar637, 18 years ago

Ticket #12 - read of multiple rpf files into one scantable.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 53.5 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, asaplog
4from asap import selector
5from numarray import ones,zeros
6import sys
7
8class scantable(Scantable):
9    """
10        The ASAP container for scans
11    """
12
13    def __init__(self, filename, average=None, unit=None):
14        """
15        Create a scantable from a saved one or make a reference
16        Parameters:
17            filename:    the name of an asap table on disk
18                         or
19                         the name of a rpfits/sdfits/ms file
20                         (integrations within scans are auto averaged
21                         and the whole file is read)
22                         or
23                         [advanced] a reference to an existing
24                         scantable
25            average:     average all integrations within a scan on read.
26                         The default (True) is taken from .asaprc.
27            unit:         brightness unit; must be consistent with K or Jy.
28                         Over-rides the default selected by the reader
29                         (input rpfits/sdfits/ms) or replaces the value
30                         in existing scantables
31        """
32        if average is None:
33            average = rcParams['scantable.autoaverage']
34        varlist = vars()
35        from asap._asap import stmath
36        self._math = stmath()
37        if isinstance(filename, Scantable):
38            Scantable.__init__(self, filename)
39        else:
40            if isinstance(filename,str):
41                import os.path
42                filename = os.path.expandvars(filename)
43                filename = os.path.expanduser(filename)
44                if not os.path.exists(filename):
45                    s = "File '%s' not found." % (filename)
46                    if rcParams['verbose']:
47                        asaplog.push(s)
48                        print asaplog.pop().strip()
49                        return
50                    raise IOError(s)
51                if os.path.isdir(filename):
52                    # crude check if asap table
53                    if os.path.exists(filename+'/table.info'):
54                        Scantable.__init__(self, filename, "memory")
55                        if unit is not None:
56                            self.set_fluxunit(unit)
57                        self.set_freqframe(rcParams['scantable.freqframe'])
58                    else:
59                        msg = "The given file '%s' is not a valid asap table." % (filename)
60                        if rcParams['verbose']:
61                            print msg
62                            return
63                        else:
64                            raise IOError(msg)
65                else:
66                    self._fill([filename],unit, average)
67            elif (isinstance(filename,list) or isinstance(filename,tuple)) \
68                  and isinstance(filename[-1], str):
69                self._fill(filename, unit, average)
70        print_log()
71
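    # Illustrative constructor usage (a sketch based on the docstring above,
    # not part of the original source; the file names are hypothetical):
    #
    #   from asap import scantable
    #   s = scantable('obs1.rpf')                  # one file, integrations auto-averaged
    #   s = scantable(['obs1.rpf', 'obs2.rpf'],    # several rpfits files read into
    #                 average=True, unit='Jy')     # a single scantable (ticket #12)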
72    def save(self, name=None, format=None, overwrite=False):
73        """
74        Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
75        Image FITS or MS2 format.
76        Parameters:
77            name:        the name of the output file. For format="FITS" this
78                         is the directory name into which all the files
79                         will be written (default is 'asap_FITS'). For format
80                         "ASCII" this is the root file name (data in 'name'.txt
81                         and header in 'name'_header.txt)
82            format:      an optional file format. Default is ASAP.
83                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
84                                       'SDFITS' (save as SDFITS file)
85                                       'FITS' (saves each row as a FITS Image)
86                                       'ASCII' (saves as ascii text file)
87                                       'MS2' (saves as an aips++
88                                              MeasurementSet V2)
89            overwrite:   whether to overwrite the file if it exists.
90                         The default (False) is to return with a warning
91                         without writing the output. USE WITH CARE.
92        Example:
93            scan.save('myscan.asap')
94            scan.save('myscan.sdfits','SDFITS')
95        """
96        from os import path
97        if format is None: format = rcParams['scantable.save']
98        suffix = '.'+format.lower()
99        if name is None or name =="":
100            name = 'scantable'+suffix
101            from asap import asaplog
102            msg = "No filename given. Using default name %s..." % name
103            asaplog.push(msg)
104        name = path.expandvars(name)
105        if path.isfile(name) or path.isdir(name):
106            if not overwrite:
107                msg = "File %s exists." % name
108                if rcParams['verbose']:
109                    print msg
110                    return
111                else:
112                    raise IOError(msg)
113        format2 = format.upper()
114        if format2 == 'ASAP':
115            self._save(name)
116        else:
117            print "Saving in format '%s' is not yet implemented." % format2
118#             from asap._asap import sdwriter as _sw
119#             w = _sw(format2)
120#             w.write(self, name, stokes)
121
122        print_log()
123        return
124
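    # Example of writing a scantable to disk (a hedged sketch; in this revision
    # only the 'ASAP' format is actually written, and the file names below are
    # hypothetical):
    #
    #   scan.save()                                   # defaults to 'scantable.asap'
    #   scan.save('obs.asap', format='ASAP', overwrite=True)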
125    def copy(self):
126        """
127        Return a copy of this scantable.
128        Parameters:
129            none
130        Example:
131            copiedscan = scan.copy()
132        """
133        sd = scantable(Scantable._copy(self))
134        return sd
135
136    def get_scan(self, scanid=None):
137        """
138        Return a specific scan (by scanno) or collection of scans (by
139        source name) in a new scantable.
140        Parameters:
141            scanid:    a (list of) scanno or a source name, unix-style
142                       patterns are accepted for source name matching, e.g.
143                       '*_R' gets all 'ref' scans
144        Example:
145            # get all scans containing the source '323p459'
146            newscan = scan.get_scan('323p459')
147            # get all 'off' scans
148            refscans = scan.get_scan('*_R')
149            # get a subset of scans by scanno (as listed in scan.summary())
150            newscan = scan.get_scan([0,2,7,10])
151        """
152        if scanid is None:
153            if rcParams['verbose']:
154                print "Please specify a scan no or name to retrieve from the scantable"
155                return
156            else:
157                raise RuntimeError("No scan given")
158
159        try:
160            bsel = self.get_selection()
161            sel = selector()
162            if type(scanid) is str:
163                sel.set_name(scanid)
164                self.set_selection(bsel+sel)
165                scopy = self._copy()
166                self.set_selection(bsel)
167                return scantable(scopy)
168            elif type(scanid) is int:
169                sel.set_scans([scanid])
170                self.set_selection(bsel+sel)
171                scopy = self._copy()
172                self.set_selection(bsel)
173                return scantable(scopy)
174            elif type(scanid) is list:
175                sel.set_scans(scanid)
176                self.set_selection(sel)
177                scopy = self._copy()
178                self.set_selection(bsel)
179                return scantable(scopy)
180            else:
181                msg = "Illegal scanid type, use an 'int' or a 'list' of ints."
182                if rcParams['verbose']:
183                    print msg
184                else:
185                    raise TypeError(msg)
186        except RuntimeError:
187            if rcParams['verbose']: print "Couldn't find any match."
188            else: raise
189
190    def __str__(self):
191        return Scantable._summary(self,True)
192
193    def summary(self, filename=None):
194        """
195        Print a summary of the contents of this scantable.
196        Parameters:
197            filename:    the name of a file to write the output to
198                         Default - no file output
199            verbose:     print extra info such as the frequency table
200                         The default (False) is taken from .asaprc
201        """
202        info = Scantable._summary(self, True)
203        #if verbose is None: verbose = rcParams['scantable.verbosesummary']
204        if filename is not None:
205            if filename == "":
206                filename = 'scantable_summary.txt'
207            from os.path import expandvars, isdir
208            filename = expandvars(filename)
209            if not isdir(filename):
210                data = open(filename, 'w')
211                data.write(info)
212                data.close()
213            else:
214                msg = "Illegal file name '%s'." % (filename)
215                if rcParams['verbose']:
216                    print msg
217                else:
218                    raise IOError(msg)
219        if rcParams['verbose']:
220            try:
221                from IPython.genutils import page as pager
222            except ImportError:
223                from pydoc import pager
224            pager(info)
225        else:
226            return info
227
228
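    # Illustrative use of summary() (a sketch; 'summary.txt' is a hypothetical
    # file name):
    #
    #   scan.summary()                # page the summary (or return it as a
    #                                 # string when verbose is off)
    #   scan.summary('summary.txt')   # also write it to a text file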
229    def get_selection(self):
230        """
231        """
232        return selector(self._getselection())
233
234    def set_selection(self, selection):
235        """
236        """
237        self._setselection(selection)
238
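    # Sketch of the selection workflow used throughout this class (the source
    # name pattern and IF number are only examples):
    #
    #   sel = selector()
    #   sel.set_name("ORION*")
    #   sel.set_ifs([0])
    #   old = scan.get_selection()     # remember the current selection
    #   scan.set_selection(sel)        # restrict subsequent operations
    #   scan.set_selection(old)        # ...and restore it afterwards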
239    def set_cursor(self, beam=0, IF=0, pol=0):
240        """
241        Set the spectrum for individual operations.
242        Parameters:
243            beam, IF, pol:    a number
244        Example:
245            scan.set_cursor(0,0,1)
246            pol1sig = scan.stats(all=False) # returns std dev for beam=0
247                                            # if=0, pol=1
248        """
249        print "DEPRECATED"
250        varlist = vars()
251        sel = selector()
252        sel._setbeams([beam])
253        sel._setpols([pol])
254        sel._setifs([IF])
255        self._add_history("set_cursor", varlist)
256        return
257
258    def get_cursor(self):
259        """
260        Return/print the current 'cursor' into the Beam/IF/Pol cube.
261        Parameters:
262            none
263        Returns:
264            a list of values (currentBeam,currentIF,currentPol)
265        Example:
266            none
267        """
268        print "DEPRECATED"
269        sel = self._getselection()
270        i = sel.getbeams()[0]
271        j = sel.getifs()[0]
272        k = sel.getpols()[0]
273        from asap import asaplog
274        out = "--------------------------------------------------\n"
275        out += " Cursor position\n"
276        out += "--------------------------------------------------\n"
277        out += 'Beam=%d IF=%d Pol=%d ' % (i,j,k)
278        asaplog.push(out)
279        print_log()
280        return i,j,k
281
282    def stats(self, stat='stddev', mask=None):
283        """
284        Determine the specified statistic of the current beam/if/pol
285        Takes a 'mask' as an optional parameter to specify which
286        channels should be excluded.
287        Parameters:
288            stat:    'min', 'max', 'sumsq', 'sum', 'mean',
289                     'var', 'stddev', 'avdev', 'rms', 'median'
290            mask:    an optional mask specifying where the statistic
291                     should be determined.
292        Example:
293            scan.set_unit('channel')
294            msk = scan.create_mask([100,200],[500,600])
295            scan.stats(stat='mean', mask=msk)
296        """
297        from numarray import array,zeros,Float
298        if mask is None:
299            mask = []
300        axes = ['Beam','IF','Pol','Time']
301        if not self._check_ifs():
302             raise ValueError("Cannot apply mask as the IFs have different numbers of channels. "
303                              "Please use set_selection() to select individual IFs.")
304
305        statvals = self._math._stats(self, mask, stat)
306        out = ''
307        axes = []
308        for i in range(self.nrow()):
309            axis = []
310            axis.append(self.getscan(i))
311            axis.append(self.getbeam(i))
312            axis.append(self.getif(i))
313            axis.append(self.getpol(i))
314            axis.append(self.getcycle(i))
315            axes.append(axis)
316            tm = self._gettime(i)
317            src = self._getsourcename(i)
318            out += 'Scan[%d] (%s) ' % (axis[0], src)
319            out += 'Time[%s]:\n' % (tm)
320            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
321            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
322            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
323            out += '= %3.3f\n' % (statvals[i])
324            out +=  "--------------------------------------------------\n"
325
326        if rcParams['verbose']:
327            print "--------------------------------------------------"
328            print " ",stat
329            print "--------------------------------------------------"
330            print out
331        retval = { 'axesnames': ['scanno','beamno','ifno','polno','cycleno'],
332                   'axes' : axes,
333                   'data': statvals}
334        return retval
335
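    # Example of combining create_mask() and stats() (a sketch; the channel
    # ranges are arbitrary):
    #
    #   scan.set_unit('channel')
    #   msk = scan.create_mask([100, 200], [500, 600])
    #   res = scan.stats(stat='rms', mask=msk)
    #   print res['data']              # one value per row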
336    def stddev(self,mask=None):
337        """
338        Determine the standard deviation of the current beam/if/pol
339        Takes a 'mask' as an optional parameter to specify which
340        channels should be excluded.
341        Parameters:
342            mask:    an optional mask specifying where the standard
343                     deviation should be determined.
344
345        Example:
346            scan.set_unit('channel')
347            msk = scan.create_mask([100,200],[500,600])
348            scan.stddev(mask=msk)
349        """
350        return self.stats(stat='stddev', mask=mask)
351
352    def get_tsys(self):
353        """
354        Return the System temperatures.
355        Parameters:
356
357        Returns:
358            the Tsys values for the current selection (one per row),
358            returned in a dictionary under the 'data' key
359        """
360
361        return self._row_callback(self._gettsys, "Tsys")
362
363    def _row_callback(self, callback, label):
364        axes = []
365        axesnames = ['scanno','beamno','ifno','polno','cycleno']
366        out = ""
367        outvec =[]
368        for i in range(self.nrow()):
369            axis = []
370            axis.append(self.getscan(i))
371            axis.append(self.getbeam(i))
372            axis.append(self.getif(i))
373            axis.append(self.getpol(i))
374            axis.append(self.getcycle(i))
375            axes.append(axis)
376            tm = self._gettime(i)
377            src = self._getsourcename(i)
378            out += 'Scan[%d] (%s) ' % (axis[0], src)
379            out += 'Time[%s]:\n' % (tm)
380            if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (axis[1])
381            if self.nif(-1) > 1: out +=  ' IF[%d] ' % (axis[2])
382            if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (axis[3])
383            outvec.append(callback(i))
384            out += '= %3.3f\n' % (outvec[i])
385            out +=  "--------------------------------------------------\n"
386        if rcParams['verbose']:
387            print "--------------------------------------------------"
388            print " %s" % (label)
389            print "--------------------------------------------------"
390            print out
391        retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
392        return retval
393
394
395    def get_time(self, row=-1):
396        """
397        Get a list of time stamps for the observations.
398        Return a string for each integration in the scantable.
399        Parameters:
400            row:    row no of integration. Default -1 returns all rows
401        Example:
402            none
403        """
404        out = []
405        if row == -1:
406            for i in range(self.nrow()):
407                out.append(self._gettime(i))
408            return out
409        else:
410            if row < self.nrow():
411                return self._gettime(row)
412
413    def get_sourcename(self, row=-1):
414        """
415        Get a list of source names for the observations.
416        Return a string for each integration in the scantable.
417        Parameters:
418            row:    row no of integration. Default -1 returns all rows
419        Example:
420            none
421        """
422        out = []
423        if row == -1:
424            return [self._getsourcename(i) for i in range(self.nrow())]
425        else:
426            if  0 <= row < self.nrow():
427                return self._getsourcename(row)
428
429    def get_elevation(self, row=-1):
430        """
431        Get a list of elevations for the observations.
432        Return a float for each integration in the scantable.
433        Parameters:
434            row:    row no of integration. Default -1 returns all rows
435        Example:
436            none
437        """
438        out = []
439        if row == -1:
440            return [self._getelevation(i) for i in range(self.nrow())]
441        else:
442            if  0 <= row < self.nrow():
443                return self._getelevation(row)
444
445    def get_azimuth(self, row=-1):
446        """
447        Get a list of azimuths for the observations.
448        Return a float for each integration in the scantable.
449        Parameters:
450            row:    row no of integration. Default -1 returns all rows
451        Example:
452            none
453        """
454        out = []
455        if row == -1:
456            return [self._getazimuth(i) for i in range(self.nrow())]
457        else:
458            if  0 <= row < self.nrow():
459                return self._getazimuth(row)
460
461    def get_parangle(self, row=-1):
462        """
463        Get a list of parallactic angles for the observations.
464        Return a float for each integration in the scantable.
465        Parameters:
466            row:    row no of integration. Default -1 returns all rows
467        Example:
468            none
469        """
470        out = []
471        if row == -1:
472            return [self._getparangle(i) for i in range(self.nrow())]
473        else:
474            if  0 <= row < self.nrow():
475                return self._getparangle(row)
476
477    def set_unit(self, unit='channel'):
478        """
479        Set the unit for all following operations on this scantable
480        Parameters:
481            unit:    optional unit, default is 'channel'
482                     one of '*Hz','km/s','channel', ''
483        """
484        varlist = vars()
485        if unit in ['','pixel', 'channel']:
486            unit = ''
487        inf = list(self._getcoordinfo())
488        inf[0] = unit
489        self._setcoordinfo(inf)
490        self._add_history("set_unit",varlist)
491
492    def set_instrument(self, instr):
493        """
494        Set the instrument for subsequent processing
495        Parameters:
496            instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
497                      'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
498        """
499        self._setInstrument(instr)
500        self._add_history("set_instument",vars())
501        print_log()
502
503    def set_doppler(self, doppler='RADIO'):
504        """
505        Set the doppler for all following operations on this scantable.
506        Parameters:
507            doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
508        """
509        varlist = vars()
510        inf = list(self._getcoordinfo())
511        inf[2] = doppler
512        self._setcoordinfo(inf)
513        self._add_history("set_doppler",vars())
514        print_log()
515
516    def set_freqframe(self, frame=None):
517        """
518        Set the frame type of the Spectral Axis.
519        Parameters:
520            frame:   an optional frame type, default 'LSRK'. Valid frames are:
521                     'REST','TOPO','LSRD','LSRK','BARY',
522                     'GEO','GALACTO','LGROUP','CMB'
523        Examples:
524            scan.set_freqframe('BARY')
525        """
526        if frame is None: frame = rcParams['scantable.freqframe']
527        varlist = vars()
528        valid = ['REST','TOPO','LSRD','LSRK','BARY', \
529                   'GEO','GALACTO','LGROUP','CMB']
530
531        if frame in valid:
532            inf = list(self._getcoordinfo())
533            inf[1] = frame
534            self._setcoordinfo(inf)
535            self._add_history("set_freqframe",varlist)
536        else:
537            msg = "Please specify a valid freq type. Valid types are:\n%s" % (valid,)
538            if rcParams['verbose']:
539                print msg
540            else:
541                raise TypeError(msg)
542        print_log()
543
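    # Typical axis setup before masking or plotting in velocity (a sketch;
    # the chosen frame, doppler and unit are only examples):
    #
    #   scan.set_freqframe('LSRK')
    #   scan.set_doppler('RADIO')
    #   scan.set_unit('km/s')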
544    def get_unit(self):
545        """
546        Get the default unit set in this scantable
547        Parameters:
548        Returns:
549            A unit string
550        """
551        inf = self._getcoordinfo()
552        unit = inf[0]
553        if unit == '': unit = 'channel'
554        return unit
555
556    def get_abcissa(self, rowno=0):
557        """
558        Get the abcissa in the current coordinate setup for the currently
559        selected Beam/IF/Pol
560        Parameters:
561            rowno:    an optional row number in the scantable. Default is the
562                      first row, i.e. rowno=0
563        Returns:
564            The abcissa values and their label/format string (as a tuple)
565        """
566        abc = self._getabcissa(rowno)
567        lbl = self._getabcissalabel(rowno)
568        print_log()
569        return abc, lbl
570
571    def create_mask(self, *args, **kwargs):
572        """
573        Compute and return a mask based on [min,max] windows.
574        The specified windows are to be INCLUDED, when the mask is
575        applied.
576        Parameters:
577            [min,max],[min2,max2],...
578                Pairs of start/end points specifying the regions
579                to be masked
580            invert:     optional argument. If specified as True,
581                        return an inverted mask, i.e. the regions
582                        specified are EXCLUDED
583            row:        create the mask using the specified row for
584                        unit conversions, default is row=0
585                        only necessary if frequency varies over rows.
586        Example:
587            scan.set_unit('channel')
588
589            a)
590            msk = scan.create_mask([400,500],[800,900])
591            # includes the regions between channels 400 and 500
592            # and 800 and 900 (in the unit 'channel')
593
594            b)
595            msk = scan.create_mask([400,500],[800,900], invert=True)
596            # excludes the regions between channels 400 and 500
597            # and 800 and 900 (in the unit 'channel')
598
599        """
600        row = 0
601        if kwargs.has_key("row"):
602            row = kwargs.get("row")
603        data = self._getabcissa(row)
604        u = self._getcoordinfo()[0]
605        if rcParams['verbose']:
606            if u == "": u = "channel"
607            from asap import asaplog
608            msg = "The current mask window unit is %s" % u
609            if not self._check_ifs():
610                msg += "\nThis mask is only valid for IF=%d" % (self.getif(row))
611            asaplog.push(msg)
612        n = self.nchan()
613        msk = zeros(n)
614        # test if args is a 'list' or a 'normal *args - UGLY!!!
615
616        ws = (isinstance(args[-1][-1],int) or isinstance(args[-1][-1],float)) and args or args[0]
617        for window in ws:
618            if (len(window) != 2 or window[0] > window[1] ):
619                raise TypeError("A window needs to be defined as [min,max]")
620            for i in range(n):
621                if data[i] >= window[0] and data[i] < window[1]:
622                    msk[i] = 1
623        if kwargs.has_key('invert'):
624            if kwargs.get('invert'):
625                from numarray import logical_not
626                msk = logical_not(msk)
627        print_log()
628        return msk
629
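    # Sketch of building an inverted mask in velocity units (the velocity
    # ranges are arbitrary):
    #
    #   scan.set_unit('km/s')
    #   msk = scan.create_mask([-50, -20], [20, 50], invert=True, row=0)
    #   print scan.stats('stddev', msk)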
630    def get_restfreqs(self):
631        """
632        Get the restfrequency(s) stored in this scantable.
633        The return value(s) are always of unit 'Hz'
634        Parameters:
635            none
636        Returns:
637            a list of doubles
638        """
639        return list(self._getrestfreqs())
640
641
642    def set_restfreqs(self, freqs=None, unit='Hz'):
643        """
644        Set or replace the rest frequency for the selected data.
645        If the 'freqs' argument holds a scalar,
646        then that rest frequency will be applied to all the selected
647        data.  If the 'freqs' argument holds
648        a vector, then it MUST be of equal or smaller length than
649        the number of IFs (and the available rest frequencies will be
650        replaced by this vector).  In this case, *all* data have
651        the rest frequency set per IF according
652        to the corresponding value you give in the 'freqs' vector.
653        E.g. 'freqs=[1e9,2e9]' would mean IF 0 gets restfreq 1e9 and
654        IF 1 gets restfreq 2e9.
655        You can also specify the frequencies via known line names
656        from the built-in Lovas table.
657        Parameters:
658            freqs:   list of rest frequency values or string identifiers
659            unit:    unit for rest frequency (default 'Hz')
660
661        Example:
662            # set the given restfrequency for the whole table
663            scan.set_restfreqs(freqs=1.4e9)
664            # If the number of IFs in the data is >= 2, IF0 gets the first
665            # value, IF1 the second, ...
666            scan.set_restfreqs(freqs=[1.4e9,1.67e9])
667            #set the given restfrequency for the whole table (by name)
668            scan.set_restfreqs(freqs="OH1667")
669
670        Note:
671            To do more sophisticated rest-frequency setting, e.g. on a
672            source and IF basis, use scantable.set_selection() before using
673            this function.
674            # provided your scantable is called scan
675            selection = selector()
676            selection.set_name("ORION*")
677            selection.set_ifs([1])
678            scan.set_selection(selection)
679            scan.set_restfreqs(freqs=86.6e9)
680
681        """
682        varlist = vars()
683
684        t = type(freqs)
685        if isinstance(freqs, int) or isinstance(freqs,float):
686           self._setrestfreqs(freqs, unit)
687        elif isinstance(freqs, list) or isinstance(freqs,tuple):
688            if isinstance(freqs[-1], int) or isinstance(freqs[-1],float):
689                sel = selector()
690                savesel = self._getselection()
691                for i in xrange(len(freqs)):
692                    sel.set_ifs([i])
693                    self._setselection(sel)
694                    self._setrestfreqs(freqs[i], unit)
695                self._setselection(savesel)
696            elif isinstance(freqs[-1], str):
697                # not yet implemented
698                pass
699        else:
700            return
701        self._add_history("set_restfreqs", varlist)
702
703
704
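    # Sketch of per-source, per-IF rest-frequency setting as described in the
    # Note above (the source name, IF number and 86.6e9 Hz value are examples):
    #
    #   sel = selector()
    #   sel.set_name("ORION*")
    #   sel.set_ifs([1])
    #   scan.set_selection(sel)
    #   scan.set_restfreqs(freqs=86.6e9)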
705    def history(self):
706        hist = list(self._gethistory())
707        out = "-"*80
708        for h in hist:
709            if h.startswith("---"):
710                out += "\n"+h
711            else:
712                items = h.split("##")
713                date = items[0]
714                func = items[1]
715                items = items[2:]
716                out += "\n"+date+"\n"
717                out += "Function: %s\n  Parameters:" % (func)
718                for i in items:
719                    s = i.split("=")
720                    out += "\n   %s = %s" % (s[0],s[1])
721                out += "\n"+"-"*80
722        try:
723            from IPython.genutils import page as pager
724        except ImportError:
725            from pydoc import pager
726        pager(out)
727        return
728
729    #
730    # Maths business
731    #
732
733    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
734        """
735        Return the (time) average of a scan, or apply it 'insitu'.
736        Note:
737            in channels only
738            The cursor of the output scan is set to 0.
739        Parameters:
740            one scan or comma separated  scans
741            mask:     an optional mask (only used for 'var' and 'tsys'
742                      weighting)
743            scanav:   True averages each scan separately
744                      False (default) averages all scans together,
745            weight:   Weighting scheme. 'none', 'var' (1/var(spec)
746                      weighted), 'tsys' (1/Tsys**2 weighted), 'tint'
747                      (integration time weighted) or 'tintsys' (Tint/Tsys**2).
748                      The default is 'tint'
749            align:    align the spectra in velocity before averaging. It takes
750                      the time of the first spectrum as reference time.
751        Example:
752            # time average the scantable without using a mask
753            newscan = scan.average_time()
754        """
755        varlist = vars()
756        if weight is None: weight = 'TINT'
757        if mask is None: mask = ()
758        if scanav:
759          scanav = "SCAN"
760        else:
761          scanav = "NONE"
762        s = scantable(self._math._average((self,), mask, weight.upper(),
763                      scanav, align))
764        s._add_history("average_time",varlist)
765        print_log()
766        return s
767
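    # Sketch of time averaging, per scan and with Tint/Tsys**2 weighting (the
    # weight names are those listed in the docstring):
    #
    #   av_all  = scan.average_time()                       # everything together
    #   av_scan = scan.average_time(scanav=True, weight='tintsys')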
768    def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
769        """
770        Return a scan where all spectra are converted to either
771        Jansky or Kelvin depending upon the flux units of the scan table.
772        By default the function tries to look the values up internally.
773        If it can't find them (or if you want to over-ride), you must
774        specify EITHER jyperk OR eta (and D which it will try to look up
775        also if you don't set it). jyperk takes precedence if you set both.
776        Parameters:
777            jyperk:      the Jy / K conversion factor
778            eta:         the aperture efficiency
779            d:           the geometric diameter (metres)
780            insitu:      if False a new scantable is returned.
781                         Otherwise, the scaling is done in-situ
782                         The default is taken from .asaprc (False)
783            allaxes:         if True apply to all spectra. Otherwise
784                         apply only to the selected (beam/pol/if)spectra only
785                         The default is taken from .asaprc (True if none)
786        """
787        if insitu is None: insitu = rcParams['insitu']
788        self._math._setinsitu(insitu)
789        varlist = vars()
790        if jyperk is None: jyperk = -1.0
791        if d is None: d = -1.0
792        if eta is None: eta = -1.0
793        s = scantable(self._math._convertflux(self, d, eta, jyperk))
794        s._add_history("convert_flux", varlist)
795        print_log()
796        if insitu: self._assign(s)
797        else: return s
798
799    def gain_el(self, poly=None, filename="", method="linear", insitu=None):
800        """
801        Return a scan after applying a gain-elevation correction.
802        The correction can be made via either a polynomial or a
803        table-based interpolation (and extrapolation if necessary).
804        You specify polynomial coefficients, an ascii table or neither.
805        If you specify neither, then a polynomial correction will be made
806        with built in coefficients known for certain telescopes (an error
807        will occur if the instrument is not known).
808        The data and Tsys are *divided* by the scaling factors.
809        Parameters:
810            poly:        Polynomial coefficients (default None) to compute a
811                         gain-elevation correction as a function of
812                         elevation (in degrees).
813            filename:    The name of an ascii file holding correction factors.
814                         The first row of the ascii file must give the column
815                         names and these MUST include columns
816                         "ELEVATION" (degrees) and "FACTOR" (multiply data
817                         by this) somewhere.
818                         The second row must give the data type of the
819                         column. Use 'R' for Real and 'I' for Integer.
820                         An example file would be
821                         (actual factors are arbitrary) :
822
823                         TIME ELEVATION FACTOR
824                         R R R
825                         0.1 0 0.8
826                         0.2 20 0.85
827                         0.3 40 0.9
828                         0.4 60 0.85
829                         0.5 80 0.8
830                         0.6 90 0.75
831            method:      Interpolation method when correcting from a table.
832                         Values are  "nearest", "linear" (default), "cubic"
833                         and "spline"
834            insitu:      if False a new scantable is returned.
835                         Otherwise, the scaling is done in-situ
836                         The default is taken from .asaprc (False)
837        """
838
839        if insitu is None: insitu = rcParams['insitu']
840        self._math._setinsitu(insitu)
841        varlist = vars()
842        if poly is None:
843           poly = ()
844        from os.path import expandvars
845        filename = expandvars(filename)
846        s = scantable(self._math._gainel(self, poly, filename, method))
847        s._add_history("gain_el", varlist)
848        print_log()
849        if insitu: self._assign(s)
850        else: return s
851
852    def freq_align(self, reftime=None, method='cubic', insitu=None):
853        """
854        Return a scan where all rows have been aligned in frequency/velocity.
855        The alignment frequency frame (e.g. LSRK) is that set by function
856        set_freqframe.
857        Parameters:
858            reftime:     reference time to align at. By default, the time of
859                         the first row of data is used.
860            method:      Interpolation method for regridding the spectra.
861                         Choose from "nearest", "linear", "cubic" (default)
862                         and "spline"
863            insitu:      if False a new scantable is returned.
864                         Otherwise, the scaling is done in-situ
865                         The default is taken from .asaprc (False)
866        """
867        if insitu is None: insitu = rcParams["insitu"]
868        self._math._setinsitu(insitu)
869        varlist = vars()
870        if reftime is None: reftime = ""
871        s = scantable(self._math._freq_align(self, reftime, method))
872        s._add_history("freq_align", varlist)
873        print_log()
874        if insitu: self._assign(s)
875        else: return s
876
877    def opacity(self, tau, insitu=None):
878        """
879        Apply an opacity correction. The data
880        and Tsys are multiplied by the correction factor.
881        Parameters:
882            tau:         Opacity from which the correction factor is
883                         computed as exp(tau*ZD),
884                         where ZD is the zenith distance
885            insitu:      if False a new scantable is returned.
886                         Otherwise, the scaling is done in-situ
887                         The default is taken from .asaprc (False)
888        """
889        if insitu is None: insitu = rcParams['insitu']
890        self._math._setinsitu(insitu)
891        varlist = vars()
892        s = scantable(self._math._opacity(self, tau))
893        s._add_history("opacity", varlist)
894        print_log()
895        if insitu: self._assign(s)
896        else: return s
897
898    def bin(self, width=5, insitu=None):
899        """
900        Return a scan where all spectra have been binned up.
901            width:       The bin width (default=5) in pixels
902            insitu:      if False a new scantable is returned.
903                         Otherwise, the scaling is done in-situ
904                         The default is taken from .asaprc (False)
905        """
906        if insitu is None: insitu = rcParams['insitu']
907        self._math._setinsitu(insitu)
908        varlist = vars()
909        s = scantable(self._math._bin(self, width))
910        s._add_history("bin",varlist)
911        print_log()
912        if insitu: self._assign(s)
913        else: return s
914
915
916    def resample(self, width=5, method='cubic', insitu=None):
917        """
918        Return a scan where all spectra have been resampled.
919            width:       The bin width (default=5) in pixels
920            method:      Interpolation method used for the resampling.
921                         Values are  "nearest", "linear", "cubic" (default)
922                         and "spline"
923            insitu:      if False a new scantable is returned.
924                         Otherwise, the scaling is done in-situ
925                         The default is taken from .asaprc (False)
926        """
927        if insitu is None: insitu = rcParams['insitu']
928        self._math._setinsitu(insitu)
929        varlist = vars()
930        s = scantable(self._math._resample(self, method, width))
931        s._add_history("resample",varlist)
932        print_log()
933        if insitu: self._assign(s)
934        else: return s
935
936
937    def average_pol(self, mask=None, weight='none'):
938        """
939        Average the Polarisations together.
940        Parameters:
941            mask:        An optional mask defining the region, where the
942                         averaging will be applied. The output will have all
943                         specified points masked.
944            weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
945                         weighted), or 'tsys' (1/Tsys**2 weighted)
946        """
947        varlist = vars()
948        if mask is None:
949            mask = ()
950        s = self._math._averagepol(self, mask, weight)
951        s._add_history("average_pol",varlist)
952        print_log()
953        return scantable(s)
954
955    def smooth(self, kernel="hanning", width=5.0, insitu=None):
956        """
957        Smooth the spectrum by the specified kernel (conserving flux).
958        Parameters:
959            scan:       The input scan
960            kernel:     The type of smoothing kernel. Select from
961                        'hanning' (default), 'gaussian' and 'boxcar'.
962                        The first three characters are sufficient.
963            width:      The width of the kernel in pixels. For 'hanning' this is
964                        ignored, otherwise it defaults to 5 pixels.
965                        For 'gaussian' it is the Full Width Half
966                        Maximum. For 'boxcar' it is the full width.
967            insitu:     if False a new scantable is returned.
968                        Otherwise, the scaling is done in-situ
969                        The default is taken from .asaprc (False)
970        Example:
971             none
972        """
973        if insitu is None: insitu = rcParams['insitu']
974        self._math._setinsitu(insitu)
975        varlist = vars()
976        s = scantable(self._math._smooth(self,kernel,width))
977        s._add_history("smooth", varlist)
978        print_log()
979        if insitu: self._assign(s)
980        else: return s
981
982
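    # Smoothing sketch (the kernel name is taken from the docstring; the width
    # value of 10 pixels is arbitrary):
    #
    #   sm = scan.smooth(kernel='gaussian', width=10.0, insitu=False)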
983    def poly_baseline(self, mask=None, order=0, insitu=None):
984        """
985        Return a scan which has been baselined (all rows) by a polynomial.
986        Parameters:
987            scan:       a scantable
988            mask:       an optional mask
989            order:      the order of the polynomial (default is 0)
990            insitu:     if False a new scantable is returned.
991                        Otherwise, the scaling is done in-situ
992                        The default is taken from .asaprc (False)
993            allaxes:    If True (default) apply to all spectra. Otherwise
994                        apply only to the selected (beam/pol/if)spectra only
995                        The default is taken from .asaprc (True if none)
996        Example:
997            # return a scan baselined by a third order polynomial,
998            # not using a mask
999            bscan = scan.poly_baseline(order=3)
1000        """
1001        if insitu is None: insitu = rcParams['insitu']
1002        varlist = vars()
1003        if mask is None:
1004            from numarray import ones
1005            mask = list(ones(self.nchan(-1)))
1006        from asap.asapfitter import fitter
1007        f = fitter()
1008        f.set_scan(self, mask)
1009        f.set_function(poly=order)
1010        s = f.auto_fit(insitu)
1011        s._add_history("poly_baseline", varlist)
1012        print_log()
1013        if insitu: self._assign(s)
1014        else: return s
1015
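    # Sketch of a masked baseline fit: exclude a line region from the fit by
    # inverting a mask over it (the channel range and order are arbitrary):
    #
    #   scan.set_unit('channel')
    #   linefree = scan.create_mask([1000, 3000], invert=True)
    #   bscan = scan.poly_baseline(mask=linefree, order=2)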
1016    def auto_poly_baseline(self, mask=[], edge=(0,0), order=0,
1017                           threshold=3, insitu=None):
1018        """
1019        Return a scan which has been baselined (all rows) by a polynomial.
1020        Spectral lines are detected first using linefinder and masked out
1021        to avoid them affecting the baseline solution.
1022
1023        Parameters:
1024            mask:       an optional mask retrieved from the scantable
1025            edge:       an optional number of channels to drop at
1026                        the edge of the spectrum. If only one value is
1027                        specified, the same number will be dropped from
1028                        both sides of the spectrum. Default is to keep
1029                        all channels. Nested tuples represent individual
1030                        edge selections for different IFs (whose numbers of
1031                        spectral channels can differ)
1032            order:      the order of the polynomial (default is 0)
1033            threshold:  the threshold used by line finder. It is better to
1034                        keep it large as only strong lines affect the
1035                        baseline solution.
1036            insitu:     if False a new scantable is returned.
1037                        Otherwise, the scaling is done in-situ
1038                        The default is taken from .asaprc (False)
1039
1040        Example:
1041            scan2=scan.auto_poly_baseline(order=7)
1042        """
1043        if insitu is None: insitu = rcParams['insitu']
1044        varlist = vars()
1045        from asap.asapfitter import fitter
1046        from asap.asaplinefind import linefinder
1047        from asap import _is_sequence_or_number as _is_valid
1048
1049        # check whether edge is set up for each IF individually
1050        individualEdge = False;
1051        if len(edge)>1:
1052           if isinstance(edge[0],list) or isinstance(edge[0],tuple):
1053               individualEdge = True;
1054
1055        if not _is_valid(edge, int) and not individualEdge:
1056            raise ValueError, "Parameter 'edge' has to be an integer or a " \
1057                              "pair of integers specified as a tuple. Nested tuples are allowed " \
1058                              "to make individual selection for different IFs."
1059
1060        curedge = (0,0)
1061        if individualEdge:
1062           for edge_par in edge:
1063               if not _is_valid(edge_par, int):
1064                  raise ValueError, "Each element of the 'edge' tuple has " \
1065                                    "to be a pair of integers or an integer."
1066        else:
1067           curedge = edge;
1068
1069        # setup fitter
1070        f = fitter()
1071        f.set_function(poly=order)
1072
1073        # setup line finder
1074        fl=linefinder()
1075        fl.set_options(threshold=threshold)
1076
1077        if not insitu:
1078            workscan=self.copy()
1079        else:
1080            workscan=self
1081
1082        fl.set_scan(workscan)
1083
1084        rows=range(workscan.nrow())
1085        from asap import asaplog
1086        asaplog.push("Processing:")
1087        for r in rows:
1088            msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), workscan.getpol(r), workscan.getcycle(r))
1089            asaplog.push(msg, False)
1090
1091            # figure out edge parameter
1092            if individualEdge:
1093               if len(edge) <= workscan.getif(r):
1094                  raise RuntimeError, "Number of edge elements appears to be less than the number of IFs"
1095               curedge = edge[workscan.getif(r)]
1096
1097            # setup line finder
1098            fl.find_lines(r,mask,curedge)
1099            f.set_scan(workscan, fl.get_mask())
1100            f.x = workscan._getabcissa(r)
1101            f.y = workscan._getspectrum(r)
1102            f.data = None
1103            f.fit()
1104            x = f.get_parameters()
1105            workscan._setspectrum(f.fitter.getresidual(), r)
1106        workscan._add_history("poly_baseline", varlist)
1107        if insitu:
1108            self._assign(workscan)
1109        else:
1110            return workscan
1111
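    # Sketch of auto_poly_baseline with per-IF edge channels (the edge values,
    # order and threshold are arbitrary):
    #
    #   scan2 = scan.auto_poly_baseline(edge=((10, 10), (20, 20)), order=3,
    #                                   threshold=5)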
1112    def rotate_linpolphase(self, angle):
1113        """
1114        Rotate the phase of the complex polarization O=Q+iU correlation.
1115        This is always done in situ in the raw data.  So if you call this
1116        function more than once then each call rotates the phase further.
1117        Parameters:
1118            angle:   The angle (degrees) to rotate (add) by.
1119        Examples:
1120            scan.rotate_linpolphase(2.3)
1121        """
1122        varlist = vars()
1123        self._math._rotate_linpolphase(self, angle)
1124        self._add_history("rotate_linpolphase", varlist)
1125        print_log()
1126        return
1127
1128
1129    def rotate_xyphase(self, angle):
1130        """
1131        Rotate the phase of the XY correlation.  This is always done in situ
1132        in the data.  So if you call this function more than once
1133        then each call rotates the phase further.
1134        Parameters:
1135            angle:   The angle (degrees) to rotate (add) by.
1136        Examples:
1137            scan.rotate_xyphase(2.3)
1138        """
1139        varlist = vars()
1140        self._math._rotate_xyphase(self, angle)
1141        self._add_history("rotate_xyphase", varlist)
1142        print_log()
1143        return
1144
1145    def swap_linears(self):
1146        """
1147        Swap the linear polarisations XX and YY
1148        """
1149        varlist = vars()
1150        self._math._swap_linears(self)
1151        self._add_history("swap_linears", varlist)
1152        print_log()
1153        return
1154
1155    def invert_phase(self):
1156        """
1157        Invert the phase of the complex polarisation
1158        """
1159        varlist = vars()
1160        self._math._invert_phase(self)
1161        self._add_history("invert_phase", varlist)
1162        print_log()
1163        return
1164
1165    def add(self, offset, insitu=None):
1166        """
1167        Return a scan where all spectra have the offset added
1168        Parameters:
1169            offset:      the offset
1170            insitu:      if False a new scantable is returned.
1171                         Otherwise, the scaling is done in-situ
1172                         The default is taken from .asaprc (False)
1173        """
1174        if insitu is None: insitu = rcParams['insitu']
1175        self._math._setinsitu(insitu)
1176        varlist = vars()
1177        s = scantable(self._math._unaryop(self, offset, "ADD", False))
1178        s._add_history("add",varlist)
1179        print_log()
1180        if insitu:
1181            self._assign(s)
1182        else:
1183            return s
1184
1185    def scale(self, factor, tsys=True, insitu=None,):
1186        """
1187        Return a scan where all spectra are scaled by the given 'factor'
1188        Parameters:
1189            factor:      the scaling factor
1190            insitu:      if False a new scantable is returned.
1191                         Otherwise, the scaling is done in-situ
1192                         The default is taken from .asaprc (False)
1193            tsys:        if True (default) then apply the operation to Tsys
1194                         as well as the data
1195        """
1196        if insitu is None: insitu = rcParams['insitu']
1197        self._math._setinsitu(insitu)
1198        varlist = vars()
1199        s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1200        s._add_history("scale",varlist)
1201        print_log()
1202        if insitu:
1203            self._assign(s)
1204        else:
1205            return s
1206
1207    def auto_quotient(self, mode='time', preserve=True):
1208        """
1209        This function allows you to build quotients automatically.
1210        It assumes the observation to have the same number of
1211        "ons" and "offs".
1212        It will support "closest off in time" in the future
1213        Parameters:
1214            mode:           the on/off detection mode; only 'time' (the
1215                            default) is currently accepted. 'off' scans are
1216                            identified by the trailing '_R' (Mopra/Parkes) or
1217                            '_e'/'_w' (Tid)
1218            preserve:       you can preserve (default) the continuum or
1219                            remove it.  The equations used are
1220                            preserve: Output = Toff * (on/off) - Toff
1221                            remove:   Output = Tref * (on/off) - Ton
1222        """
1223        modes = ["time"]
1224        if not mode in modes:
1225            msg = "please provide valid mode. Valid modes are %s" % (modes)
1226            raise ValueError(msg)
1227        varlist = vars()
1228        s = scantable(self._math._quotient(self, mode, preserve))
1229        s._add_history("auto_quotient",varlist)
1230        print_log()
1231        return s
1232
1233
1234
1235
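    # Sketch of a simple reduction chain built from the methods above (the
    # order of the steps and the parameter values are illustrative only):
    #
    #   q = scan.auto_quotient()             # form quotient (signal/reference) spectra
    #   q = q.average_time(scanav=True)      # average integrations per scan
    #   q = q.poly_baseline(order=1)         # remove a first-order baseline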
1236    def freq_switch(self, insitu=None):
1237        """
1238        Apply frequency switching to the data.
1239        Parameters:
1240            insitu:      if False a new scantable is returned.
1241                         Otherwise, the switching is done in-situ
1242                         The default is taken from .asaprc (False)
1243        Example:
1244            none
1245        """
1246        if insitu is None: insitu = rcParams['insitu']
1247        self._math._setinsitu(insitu)
1248        varlist = vars()
1249        s = scantable(self._math._freqswitch(self))
1250        s._add_history("freq_switch",varlist)
1251        print_log()
1252        if insitu: self._assign(s)
1253        else: return s
1254
1255    def recalc_azel(self):
1256        """
1257        Recalculate the azimuth and elevation for each position.
1258        Parameters:
1259            none
1260        Example:
1261        """
1262        varlist = vars()
1263        self._recalcazel()
1264        self._add_history("recalc_azel", varlist)
1265        print_log()
1266        return
1267
1268    def __add__(self, other):
1269        varlist = vars()
1270        s = None
1271        if isinstance(other, scantable):
1272            print "scantable + scantable NYI"
1273            return
1274        elif isinstance(other, float):
1275            s = scantable(self._math._unaryop(self, other, "ADD", False))
1276        else:
1277            raise TypeError("Other input is not a scantable or float value")
1278        s._add_history("operator +", varlist)
1279        print_log()
1280        return s
1281
1282    def __sub__(self, other):
1283        """
1284        implicit on all axes and on Tsys
1285        """
1286        varlist = vars()
1287        s = None
1288        if isinstance(other, scantable):
1289            print "scantable - scantable NYI"
1290            return
1291        elif isinstance(other, float):
1292            s = scantable(self._math._unaryop(self, other, "SUB", False))
1293        else:
1294            raise TypeError("Other input is not a scantable or float value")
1295        s._add_history("operator -", varlist)
1296        print_log()
1297        return s
1298
1299    def __mul__(self, other):
1300        """
1301        implicit on all axes and on Tsys
1302        """
1303        varlist = vars()
1304        s = None
1305        if isinstance(other, scantable):
1306            print "scantable * scantable NYI"
1307            return
1308        elif isinstance(other, float):
1309            s = scantable(self._math._unaryop(self, other, "MUL", False))
1310        else:
1311            raise TypeError("Other input is not a scantable or float value")
1312        s._add_history("operator *", varlist)
1313        print_log()
1314        return s
1315
1316
1317    def __div__(self, other):
1318        """
1319        implicit on all axes and on Tsys
1320        """
1321        varlist = vars()
1322        s = None
1323        if isinstance(other, scantable):
1324            print "scantable / scantable NYI"
1325            return
1326        elif isinstance(other, float):
1327            if other == 0.0:
1328                raise ZeroDivisionError("Dividing by zero is not recommended")
1329            s = scantable(self._math._unaryop(self, other, "DIV", False))
1330        else:
1331            raise TypeError("Other input is not a scantable or float value")
1332        s._add_history("operator /", varlist)
1333        print_log()
1334        return s
1335
1336    def get_fit(self, row=0):
1337        """
1338        Print or return the stored fits for a row in the scantable
1339        Parameters:
1340            row:    the row which the fit has been applied to.
1341        """
1342        if row >= self.nrow():
1343            return
1344        from asap.asapfit import asapfit
1345        fit = asapfit(self._getfit(row))
1346        if rcParams['verbose']:
1347            print fit
1348            return
1349        else:
1350            return fit.as_dict()
1351
1352    def _add_history(self, funcname, parameters):
1353        # create date
1354        sep = "##"
1355        from datetime import datetime
1356        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1357        hist = dstr+sep
1358        hist += funcname+sep#cdate+sep
1359        if parameters.has_key('self'): del parameters['self']
1360        for k,v in parameters.iteritems():
1361            if type(v) is dict:
1362                for k2,v2 in v.iteritems():
1363                    hist += k2
1364                    hist += "="
1365                    if isinstance(v2,scantable):
1366                        hist += 'scantable'
1367                    elif k2 == 'mask':
1368                        if isinstance(v2,list) or isinstance(v2,tuple):
1369                            hist += str(self._zip_mask(v2))
1370                        else:
1371                            hist += str(v2)
1372                    else:
1373                        hist += str(v2)
1374            else:
1375                hist += k
1376                hist += "="
1377                if isinstance(v,scantable):
1378                    hist += 'scantable'
1379                elif k == 'mask':
1380                    if isinstance(v,list) or isinstance(v,tuple):
1381                        hist += str(self._zip_mask(v))
1382                    else:
1383                        hist += str(v)
1384                else:
1385                    hist += str(v)
1386            hist += sep
1387        hist = hist[:-2] # remove trailing '##'
1388        self._addhistory(hist)
1389
1390
1391    def _zip_mask(self, mask):
1392        mask = list(mask)
1393        i = 0
1394        segments = []
1395        while mask[i:].count(1):
1396            i += mask[i:].index(1)
1397            if mask[i:].count(0):
1398                j = i + mask[i:].index(0)
1399            else:
1400                j = len(mask)
1401            segments.append([i,j])
1402            i = j
1403        return segments
1404
1405    def _get_ordinate_label(self):
1406        fu = "("+self.get_fluxunit()+")"
1407        import re
1408        lbl = "Intensity"
1409        if re.match(".K.",fu):
1410            lbl = "Brightness Temperature "+ fu
1411        elif re.match(".Jy.",fu):
1412            lbl = "Flux density "+ fu
1413        return lbl
1414
1415    def _check_ifs(self):
1416        nchans = [self.nchan(i) for i in range(self.nif(-1))]
1417        nchans = filter(lambda t: t > 0, nchans)
1418        return (sum(nchans)/len(nchans) == nchans[0])
1419
1420    def _fill(self, names, unit, average):
1421        import os
1422        varlist = vars()
1423        from asap._asap import stfiller
1424        first = True
1425        fullnames = []
1426        for name in names:
1427            name = os.path.expandvars(name)
1428            name = os.path.expanduser(name)
1429            if not os.path.exists(name):
1430                msg = "File '%s' does not exists" % (name)
1431                if rcParams['verbose']:
1432                    asaplog.push(msg)
1433                    print asaplog.pop().strip()
1434                    return
1435                raise IOError(msg)
1436            fullnames.append(name)
1437        if average:
1438            asaplog.push('Auto averaging integrations')
1439        for name in fullnames:
1440            r = stfiller()
1441            msg = "Importing %s..." % (name)
1442            asaplog.push(msg,False)
1443            print_log()
1444            r._open(name,-1,-1)
1445            r._read()
1446            tbl = r._getdata()
1447            if average:
1448                tbl = self._math._average((tbl,),(),'NONE','SCAN', False)
1449                #tbl = tbl2
1450            if not first:
1451                tbl = self._math._merge([self, tbl])
1452                #tbl = tbl2
1453            Scantable.__init__(self, tbl)
1454            r._close()
1455            del r,tbl
1456            first = False
1457        if unit is not None:
1458            self.set_fluxunit(unit)
1459        self.set_freqframe(rcParams['scantable.freqframe'])
1460        #self._add_history("scantable", varlist)
1461