source: trunk/python/scantable.py@ 1093

Last change on this file since 1093 was 1093, checked in by mar637, 18 years ago

removed FITS output, added drop_scan function

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 58.0 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log, asaplog
4from asap import selector
5from numarray import ones,zeros
6import sys
7
8class scantable(Scantable):
9 """
10 The ASAP container for scans
11 """
12
13 def __init__(self, filename, average=None, unit=None):
14 """
15 Create a scantable from a saved one or make a reference
16 Parameters:
17 filename: the name of an asap table on disk
18 or
19 the name of a rpfits/sdfits/ms file
20 (integrations within scans are auto averaged
21 and the whole file is read)
22 or
23 [advanced] a reference to an existing
24 scantable
25 average: average all integrations within a scan on read.
26 The default (True) is taken from .asaprc.
27 unit: brightness unit; must be consistent with K or Jy.
28 Over-rides the default selected by the reader
29 (input rpfits/sdfits/ms) or replaces the value
30 in existing scantables
31 """
32 if average is None:
33 average = rcParams['scantable.autoaverage']
34 varlist = vars()
35 from asap._asap import stmath
36 self._math = stmath()
37 if isinstance(filename, Scantable):
38 Scantable.__init__(self, filename)
39 else:
40 if isinstance(filename,str):
41 import os.path
42 filename = os.path.expandvars(filename)
43 filename = os.path.expanduser(filename)
44 if not os.path.exists(filename):
45 s = "File '%s' not found." % (filename)
46 if rcParams['verbose']:
47 asaplog.push(s)
48 print asaplog.pop().strip()
49 return
50 raise IOError(s)
51 if os.path.isdir(filename):
52 # crude check if asap table
53 if os.path.exists(filename+'/table.info'):
54 Scantable.__init__(self, filename, rcParams['scantable.storage']=='disk')
55 if unit is not None:
56 self.set_fluxunit(unit)
57 self.set_freqframe(rcParams['scantable.freqframe'])
58 else:
59 msg = "The given file '%s'is not a valid asap table." % (filename)
60 if rcParams['verbose']:
61 print msg
62 return
63 else:
64 raise IOError(msg)
65 else:
66 self._fill([filename],unit, average)
67 elif (isinstance(filename,list) or isinstance(filename,tuple)) \
68 and isinstance(filename[-1], str):
69 self._fill(filename, unit, average)
70 print_log()
71
72 def save(self, name=None, format=None, overwrite=False):
73 """
74 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
75 ASCII or MS2 format.
76 Parameters:
77 name: the name of the outputfile. For format "ASCII"
78 this is the root file name (data in 'name'.txt
79 and header in 'name'_header.txt)
80 format: an optional file format. Default is ASAP.
81 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
82 'SDFITS' (save as SDFITS file)
83 'ASCII' (saves as ascii text file)
84 'MS2' (saves as an aips++
85 MeasurementSet V2)
86 overwrite: If the file should be overwritten if it exists.
87 The default False is to return with warning
88 without writing the output. USE WITH CARE.
89 Example:
90 scan.save('myscan.asap')
91 scan.save('myscan.sdfits','SDFITS')
92 """
93 from os import path
94 if format is None: format = rcParams['scantable.save']
95 suffix = '.'+format.lower()
96 if name is None or name =="":
97 name = 'scantable'+suffix
98 from asap import asaplog
99 msg = "No filename given. Using default name %s..." % name
100 asaplog.push(msg)
101 name = path.expandvars(name)
102 if path.isfile(name) or path.isdir(name):
103 if not overwrite:
104 msg = "File %s exists." % name
105 if rcParams['verbose']:
106 print msg
107 return
108 else:
109 raise IOError(msg)
110 format2 = format.upper()
111 if format2 == 'ASAP':
112 self._save(name)
113 else:
114 from asap._asap import stwriter as stw
115 w = stw(format2)
116 w.write(self, name)
117 print_log()
118 return
119
120 def copy(self):
121 """
122 Return a copy of this scantable.
123 Parameters:
124 none
125 Example:
126 copiedscan = scan.copy()
127 """
128 sd = scantable(Scantable._copy(self))
129 return sd
130
131 def drop_scan(self, scanid=None):
132 """
133 Return a new scantable where the specified scan number(s) has(have)
134 been dropped.
135 Parameters:
136 scanid: a (list of) scan number(s)
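Example:
# drop scans 2 and 3 (scan numbers are illustrative; see scan.summary())
newscan = scan.drop_scan([2,3])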
137 """
138 from asap import _is_sequence_or_number as _is_valid
139 from asap import _to_list
140 from asap import unique
141 if not _is_valid(scanid):
142 if rcParams['verbose']:
143 print "Please specify a scanno to drop from the scantable"
144 return
145 else:
146 raise RuntimeError("No scan given")
147 try:
148 scanid = _to_list(scanid)
149 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
150 for sid in scanid: allscans.remove(sid)
151 if len(allscans) == 0: raise ValueError("Can't remove all scans")
152 except ValueError:
153 if rcParams['verbose']:
154 print "Couldn't find any match."
155 return
156 else: raise
157 try:
158 bsel = self.get_selection()
159 sel = selector()
160 sel.set_scans(allscans)
161 self.set_selection(bsel+sel)
162 scopy = self._copy()
163 self.set_selection(bsel)
164 return scantable(scopy)
165 except RuntimeError:
166 if rcParams['verbose']: print "Couldn't find any match."
167 else: raise
168
169
170 def get_scan(self, scanid=None):
171 """
172 Return a specific scan (by scanno) or collection of scans (by
173 source name) in a new scantable.
174 Parameters:
175 scanid: a (list of) scanno or a source name, unix-style
176 patterns are accepted for source name matching, e.g.
177 '*_R' gets all 'ref' scans
178 Example:
179 # get all scans containing the source '323p459'
180 newscan = scan.get_scan('323p459')
181 # get all 'off' scans
182 refscans = scan.get_scan('*_R')
183 # get a subset of scans by scanno (as listed in scan.summary())
184 newscan = scan.get_scan([0,2,7,10])
185 """
186 if scanid is None:
187 if rcParams['verbose']:
188 print "Please specify a scan no or name to retrieve from the scantable"
189 return
190 else:
191 raise RuntimeError("No scan given")
192
193 try:
194 bsel = self.get_selection()
195 sel = selector()
196 if type(scanid) is str:
197 sel.set_name(scanid)
198 self.set_selection(bsel+sel)
199 scopy = self._copy()
200 self.set_selection(bsel)
201 return scantable(scopy)
202 elif type(scanid) is int:
203 sel.set_scans([scanid])
204 self.set_selection(bsel+sel)
205 scopy = self._copy()
206 self.set_selection(bsel)
207 return scantable(scopy)
208 elif type(scanid) is list:
209 sel.set_scans(scanid)
210 self.set_selection(bsel+sel)
211 scopy = self._copy()
212 self.set_selection(bsel)
213 return scantable(scopy)
214 else:
215 msg = "Illegal scanid type, use 'int' or 'list' if ints."
216 if rcParams['verbose']:
217 print msg
218 else:
219 raise TypeError(msg)
220 except RuntimeError:
221 if rcParams['verbose']: print "Couldn't find any match."
222 else: raise
223
224 def __str__(self):
225 return Scantable._summary(self,True)
226
227 def summary(self, filename=None):
228 """
229 Print a summary of the contents of this scantable.
230 Parameters:
231 filename: the name of a file to write the output to
232 Default - no file output
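Example:
scan.summary() # print the summary
scan.summary('mysummary.txt') # write the summary to a text file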
235 """
236 info = Scantable._summary(self, True)
237 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
238 if filename is not None:
239 if filename is "":
240 filename = 'scantable_summary.txt'
241 from os.path import expandvars, isdir
242 filename = expandvars(filename)
243 if not isdir(filename):
244 data = open(filename, 'w')
245 data.write(info)
246 data.close()
247 else:
248 msg = "Illegal file name '%s'." % (filename)
249 if rcParams['verbose']:
250 print msg
251 else:
252 raise IOError(msg)
253 if rcParams['verbose']:
254 try:
255 from IPython.genutils import page as pager
256 except ImportError:
257 from pydoc import pager
258 pager(info)
259 else:
260 return info
261
262
263 def get_selection(self):
264 """
265 Get the selection object currently set on this scantable.
266 Parameters:
267 none
268 Example:
269 sel = scan.get_selection()
270 sel.set_ifs(0) # select IF 0
271 scan.set_selection(sel) # apply modified selection
272 """
273 return selector(self._getselection())
274
275 def set_selection(self, selection=selector()):
276 """
277 Select a subset of the data. All following operations on this scantable
278 are only applied to this selection.
279 Parameters:
280 selection: a selector object (default unset the selection)
281 Examples:
282 sel = selector() # create a selection object
283 sel.set_scans([0,3]) # select SCANNO 0 and 3
284 scan.set_selection(sel) # set the selection
285 scan.summary() # will only print summary of scanno 0 and 3
286 scan.set_selection() # unset the selection
287 """
288 self._setselection(selection)
289
290 def set_cursor(self, beam=0, IF=0, pol=0):
291 print "DEPRECATED - use set_selection"
292
293 def get_cursor(self):
294 print "DEPRECATED - use get_selection"
295
296 def stats(self, stat='stddev', mask=None):
297 """
298 Determine the specified statistic of the current beam/if/pol
299 Takes a 'mask' as an optional parameter to specify which
300 channels should be excluded.
301 Parameters:
302 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
303 'var', 'stddev', 'avdev', 'rms', 'median'
304 mask: an optional mask specifying where the statistic
305 should be determined.
306 Example:
307 scan.set_unit('channel')
308 msk = scan.create_mask([100,200],[500,600])
309 scan.stats(stat='mean', mask=msk)
310 """
311 from numarray import array,zeros,Float
312 if mask is None:
313 mask = []
314 axes = ['Beam','IF','Pol','Time']
315 if not self._check_ifs():
316 raise ValueError("Cannot apply mask as the IFs have different number of channels"
317 "Please use setselection() to select individual IFs")
318
319 statvals = self._math._stats(self, mask, stat)
320 out = ''
321 axes = []
322 for i in range(self.nrow()):
323 axis = []
324 axis.append(self.getscan(i))
325 axis.append(self.getbeam(i))
326 axis.append(self.getif(i))
327 axis.append(self.getpol(i))
328 axis.append(self.getcycle(i))
329 axes.append(axis)
330 tm = self._gettime(i)
331 src = self._getsourcename(i)
332 out += 'Scan[%d] (%s) ' % (axis[0], src)
333 out += 'Time[%s]:\n' % (tm)
334 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
335 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
336 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
337 out += '= %3.3f\n' % (statvals[i])
338 out += "--------------------------------------------------\n"
339
340 if rcParams['verbose']:
341 print "--------------------------------------------------"
342 print " ",stat
343 print "--------------------------------------------------"
344 print out
345 retval = { 'axesnames': ['scanno','beamno','ifno','polno','cycleno'],
346 'axes' : axes,
347 'data': statvals}
348 return retval
349
350 def stddev(self,mask=None):
351 """
352 Determine the standard deviation of the current beam/if/pol
353 Takes a 'mask' as an optional parameter to specify which
354 channels should be excluded.
355 Parameters:
356 mask: an optional mask specifying where the standard
357 deviation should be determined.
358
359 Example:
360 scan.set_unit('channel')
361 msk = scan.create_mask([100,200],[500,600])
362 scan.stddev(mask=msk)
363 """
364 return self.stats(stat='stddev', mask=mask)
365
366
367 def column_names(self):
368 """
369 Return a list of column names, which can be used for selection.
370 """
371 return list(Scantable.column_names(self))
372
373 def get_tsys(self):
374 """
375 Return the System temperatures.
376 Parameters:
377
378 Returns:
379 a list of Tsys values for the current selection
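Example:
tsys = scan.get_tsys()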
380 """
381
382 return self._row_callback(self._gettsys, "Tsys")
383
384 def _row_callback(self, callback, label):
385 axes = []
386 axesnames = ['scanno','beamno','ifno','polno','cycleno']
387 out = ""
388 outvec =[]
389 for i in range(self.nrow()):
390 axis = []
391 axis.append(self.getscan(i))
392 axis.append(self.getbeam(i))
393 axis.append(self.getif(i))
394 axis.append(self.getpol(i))
395 axis.append(self.getcycle(i))
396 axes.append(axis)
397 tm = self._gettime(i)
398 src = self._getsourcename(i)
399 out += 'Scan[%d] (%s) ' % (axis[0], src)
400 out += 'Time[%s]:\n' % (tm)
401 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
402 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
403 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
404 outvec.append(callback(i))
405 out += '= %3.3f\n' % (outvec[i])
406 out += "--------------------------------------------------\n"
407 if rcParams['verbose']:
408 print "--------------------------------------------------"
409 print " %s" % (label)
410 print "--------------------------------------------------"
411 print out
412 retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
413 return retval
414
415 def _get_column(self, callback, row=-1):
416 """
417 """
418 if row == -1:
419 return [callback(i) for i in range(self.nrow())]
420 else:
421 if 0 <= row < self.nrow():
422 return callback(row)
423
424
425 def get_time(self, row=-1):
426 """
427 Get a list of time stamps for the observations.
428 Return a string for each integration in the scantable.
429 Parameters:
430 row: row no of integration. Default -1 return all rows
431 Example:
432 none
433 """
434 return self._get_column(self._gettime, row)
435
436 def get_sourcename(self, row=-1):
437 """
438 Get a list of source names for the observations.
439 Return a string for each integration in the scantable.
440 Parameters:
441 row: row no of integration. Default -1 return all rows
442 Example:
443 none
444 """
445 return self._get_column(self._getsourcename, row)
446
447 def get_elevation(self, row=-1):
448 """
449 Get a list of elevations for the observations.
450 Return a float for each integration in the scantable.
451 Parameters:
452 row: row no of integration. Default -1 return all rows
453 Example:
454 none
455 """
456 return self._get_column(self._getelevation, row)
457
458 def get_azimuth(self, row=-1):
459 """
460 Get a list of azimuths for the observations.
461 Return a float for each integration in the scantable.
462 Parameters:
463 row: row no of integration. Default -1 return all rows
464 Example:
465 none
466 """
467 return self._get_column(self._getazimuth, row)
468
469 def get_parangle(self, row=-1):
470 """
471 Get a list of parallactic angles for the observations.
472 Return a float for each integration in the scantable.
473 Parameters:
474 row: row no of integration. Default -1 return all rows
475 Example:
476 none
477 """
478 return self._get_column(self._getparangle, row)
479
480 def get_direction(self, row=-1):
481 """
482 Get a list of Positions on the sky (direction) for the observations.
483 Return a float for each integration in the scantable.
484 Parameters:
485 row: row no of integration. Default -1 return all rows
486 Example:
487 none
488 """
489 return self._get_column(self._getdirection, row)
490
491 def set_unit(self, unit='channel'):
492 """
493 Set the unit for all following operations on this scantable
494 Parameters:
495 unit: optional unit, default is 'channel'
496 one of '*Hz','km/s','channel', ''
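Example:
scan.set_unit('km/s') # work in velocity from now on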
497 """
498 varlist = vars()
499 if unit in ['','pixel', 'channel']:
500 unit = ''
501 inf = list(self._getcoordinfo())
502 inf[0] = unit
503 self._setcoordinfo(inf)
504 self._add_history("set_unit",varlist)
505
506 def set_instrument(self, instr):
507 """
508 Set the instrument for subsequent processing
509 Parameters:
510 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
511 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
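Example:
scan.set_instrument('ATMOPRA')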
512 """
513 self._setInstrument(instr)
514 self._add_history("set_instument",vars())
515 print_log()
516
517 def set_doppler(self, doppler='RADIO'):
518 """
519 Set the doppler for all following operations on this scantable.
520 Parameters:
521 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
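Example:
scan.set_doppler('OPTICAL')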
522 """
523 varlist = vars()
524 inf = list(self._getcoordinfo())
525 inf[2] = doppler
526 self._setcoordinfo(inf)
527 self._add_history("set_doppler",vars())
528 print_log()
529
530 def set_freqframe(self, frame=None):
531 """
532 Set the frame type of the Spectral Axis.
533 Parameters:
534 frame: an optional frame type, default 'LSRK'. Valid frames are:
535 'REST','TOPO','LSRD','LSRK','BARY',
536 'GEO','GALACTO','LGROUP','CMB'
537 Examples:
538 scan.set_freqframe('BARY')
539 """
540 if frame is None: frame = rcParams['scantable.freqframe']
541 varlist = vars()
542 valid = ['REST','TOPO','LSRD','LSRK','BARY', \
543 'GEO','GALACTO','LGROUP','CMB']
544
545 if frame in valid:
546 inf = list(self._getcoordinfo())
547 inf[1] = frame
548 self._setcoordinfo(inf)
549 self._add_history("set_freqframe",varlist)
550 else:
551 msg = "Please specify a valid freq type. Valid types are:\n",valid
552 if rcParams['verbose']:
553 print msg
554 else:
555 raise TypeError(msg)
556 print_log()
557
558 def set_dirframe(self, frame=""):
559 """
560 Set the frame type of the Direction on the sky.
561 Parameters:
562 frame: an optional frame type, default ''. Valid frames are:
563 'J2000', 'B1950', 'GALACTIC'
564 Examples:
565 scan.set_dirframe('GALACTIC')
566 """
567 varlist = vars()
568 try:
569 Scantable.set_dirframe(self, frame)
570 except RuntimeError,msg:
571 if rcParams['verbose']:
572 print msg
573 else:
574 raise
575 self._add_history("set_dirframe",varlist)
576
577 def get_unit(self):
578 """
579 Get the default unit set in this scantable
580 Parameters:
581 Returns:
582 A unit string
583 """
584 inf = self._getcoordinfo()
585 unit = inf[0]
586 if unit == '': unit = 'channel'
587 return unit
588
589 def get_abcissa(self, rowno=0):
590 """
591 Get the abcissa in the current coordinate setup for the currently
592 selected Beam/IF/Pol
593 Parameters:
594 rowno: an optional row number in the scantable. Default is the
595 first row, i.e. rowno=0
596 Returns:
597 The abcissa values and its format string (as a tuple)
598 """
599 abc = self._getabcissa(rowno)
600 lbl = self._getabcissalabel(rowno)
601 print_log()
602 return abc, lbl
603
604 def flag(self, mask=[]):
605 """
606 Flag the selected data using an optional channel mask.
607 Parameters:
608 mask: an optional channel mask, created with create_mask. Default
609 (no mask) is all channels.
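Example:
# flag the channels inside the given windows (ranges are illustrative)
msk = scan.create_mask([0,100],[400,500])
scan.flag(msk)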
610 """
611 varlist = vars()
612 try:
613 self._flag(mask)
614 except RuntimeError,msg:
615 if rcParams['verbose']:
616 print msg
617 return
618 else: raise
619 self._add_history("flag", varlist)
620
621
622 def create_mask(self, *args, **kwargs):
623 """
624 Compute and return a mask based on [min,max] windows.
625 The specified windows are to be INCLUDED, when the mask is
626 applied.
627 Parameters:
628 [min,max],[min2,max2],...
629 Pairs of start/end points (inclusive) specifying the regions
630 to be masked
631 invert: optional argument. If specified as True,
632 return an inverted mask, i.e. the regions
633 specified are EXCLUDED
634 row: create the mask using the specified row for
635 unit conversions, default is row=0
636 only necessary if frequency varies over rows.
637 Example:
638 scan.set_unit('channel')
639 a)
640 msk = scan.create_mask([400,500],[800,900])
641 # masks everything outside 400 and 500
642 # and 800 and 900 in the unit 'channel'
643
644 b)
645 msk = scan.create_mask([400,500],[800,900], invert=True)
646 # masks the regions between 400 and 500
647 # and 800 and 900 in the unit 'channel'
648 c)
649 # mask only channel 400
650 msk = scan.create_mask([400,400])
651 """
652 row = 0
653 if kwargs.has_key("row"):
654 row = kwargs.get("row")
655 data = self._getabcissa(row)
656 u = self._getcoordinfo()[0]
657 if rcParams['verbose']:
658 if u == "": u = "channel"
659 from asap import asaplog
660 msg = "The current mask window unit is %s" % u
661 if not self._check_ifs():
662 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
663 asaplog.push(msg)
664 n = self.nchan()
665 msk = zeros(n)
666 # test if args is a 'list' or a 'normal *args - UGLY!!!
667
668 ws = (isinstance(args[-1][-1],int) or isinstance(args[-1][-1],float)) and args or args[0]
669 for window in ws:
670 if (len(window) != 2 or window[0] > window[1] ):
671 raise TypeError("A window needs to be defined as [min,max]")
672 for i in range(n):
673 if data[i] >= window[0] and data[i] <= window[1]:
674 msk[i] = 1
675 if kwargs.has_key('invert'):
676 if kwargs.get('invert'):
677 from numarray import logical_not
678 msk = logical_not(msk)
679 print_log()
680 return msk
681
682 def get_restfreqs(self):
683 """
684 Get the restfrequency(s) stored in this scantable.
685 The return value(s) are always of unit 'Hz'
686 Parameters:
687 none
688 Returns:
689 a list of doubles
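Example:
rf = scan.get_restfreqs() # list of values in Hz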
690 """
691 return list(self._getrestfreqs())
692
693
694 def set_restfreqs(self, freqs=None, unit='Hz'):
695 """
696 Set or replace the restfrequency specified.
697 If the 'freqs' argument holds a scalar,
698 then that rest frequency will be applied to all the selected
699 data. If the 'freqs' argument holds
700 a vector, then it MUST be of equal or smaller length than
701 the number of IFs (and the available restfrequencies will be
702 replaced by this vector). In this case, *all* data have
703 the restfrequency set per IF according
704 to the corresponding value you give in the 'freqs' vector.
705 E.g. 'freqs=[1e9,2e9]' would mean IF 0 gets restfreq 1e9 and
706 IF 1 gets restfreq 2e9.
707 You can also specify the frequencies via known line names
708 from the built-in Lovas table.
709 Parameters:
710 freqs: list of rest frequency values or string identifiers
711 unit: unit for rest frequency (default 'Hz')
712
713 Example:
714 # set the given restfrequency for the whole table
715 scan.set_restfreqs(freqs=1.4e9)
716 # If the number of IFs in the data is >= 2 then IF0 gets the first
717 # value, IF1 the second, and so on
718 scan.set_restfreqs(freqs=[1.4e9,1.67e9])
719 #set the given restfrequency for the whole table (by name)
720 scan.set_restfreqs(freqs="OH1667")
721
722 Note:
723 To do more sophisticated rest frequency setting, e.g. on a
724 source and IF basis, use scantable.set_selection() before using
725 this function.
726 # provided your scantable is called scan
727 selection = selector()
728 selection.set_name("ORION*")
729 selection.set_ifs([1])
730 scan.set_selection(selection)
731 scan.set_restfreqs(freqs=86.6e9)
732
733 """
734 varlist = vars()
735
736 t = type(freqs)
737 if isinstance(freqs, int) or isinstance(freqs,float):
738 self._setrestfreqs(freqs, unit)
739 elif isinstance(freqs, list) or isinstance(freqs,tuple):
740 if isinstance(freqs[-1], int) or isinstance(freqs[-1],float):
741 sel = selector()
742 savesel = self._getselection()
743 for i in xrange(len(freqs)):
744 sel.set_ifs([i])
745 self._setselection(sel)
746 self._setrestfreqs(freqs[i], unit)
747 self._setselection(savesel)
748 elif isinstance(freqs[-1], str):
749 # not yet implemented
750 pass
751 else:
752 return
753 self._add_history("set_restfreqs", varlist)
754
755
756
757 def history(self):
758 hist = list(self._gethistory())
759 out = "-"*80
760 for h in hist:
761 if h.startswith("---"):
762 out += "\n"+h
763 else:
764 items = h.split("##")
765 date = items[0]
766 func = items[1]
767 items = items[2:]
768 out += "\n"+date+"\n"
769 out += "Function: %s\n Parameters:" % (func)
770 for i in items:
771 s = i.split("=")
772 out += "\n %s = %s" % (s[0],s[1])
773 out += "\n"+"-"*80
774 try:
775 from IPython.genutils import page as pager
776 except ImportError:
777 from pydoc import pager
778 pager(out)
779 return
780
781 #
782 # Maths business
783 #
784
785 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
786 """
787 Return the (time) weighted average of a scan.
788 Note:
789 in channels only - align if necessary
790 Parameters:
792 mask: an optional mask (only used for 'var' and 'tsys'
793 weighting)
794 scanav: True averages each scan separately
795 False (default) averages all scans together,
796 weight: Weighting scheme. 'none', 'var' (1/var(spec)
797 weighted), 'tsys' (1/Tsys**2 weighted), 'tint'
798 (integration time weighted) or 'tintsys' (Tint/Tsys**2).
799 The default is 'tint'
800 align: align the spectra in velocity before averaging. It takes
801 the time of the first spectrum as reference time.
802 Example:
803 # time average the scantable without using a mask
804 newscan = scan.average_time()
805 """
806 varlist = vars()
807 if weight is None: weight = 'TINT'
808 if mask is None: mask = ()
809 if scanav:
810 scanav = "SCAN"
811 else:
812 scanav = "NONE"
813 scan = (self,)
814 try:
815 if align:
816 scan = (self.freq_align(insitu=False),)
817 s = scantable(self._math._average(scan, mask, weight.upper(),
818 scanav))
819 except RuntimeError,msg:
820 if rcParams['verbose']:
821 print msg
822 return
823 else: raise
824 s._add_history("average_time",varlist)
825 print_log()
826 return s
827
828 def average_channel(self, mode="MEDIAN", scanav=False, align=False):
829 """
830 Return the (median) average of a scan.
831 Note:
832 in channels only - align if necessary
833 the median Tsys is computed.
834 Parameters:
836 mode: type of average, default "MEDIAN"
837 scanav: True averages each scan separately
838 False (default) averages all scans together,
839 align: align the spectra in velocity before averaging. It takes
840 the time of the first spectrum as reference time.
841 Example:
842 # median average the scan
843 newscan = scan.average_channel()
844 """
845 varlist = vars()
846 if mode is None: mode = 'MEDIAN'
847 scanav = "NONE"
848 if scanav: scanav = "SCAN"
849 scan = self
850 try:
851 if align:
852 scan = self.freq_align(insitu=False)
853 s = scantable(self._math._averagechannel(scan, mode, scanav))
854 except RuntimeError,msg:
855 if rcParams['verbose']:
856 print msg
857 return
858 else: raise
859 s._add_history("average_channel",varlist)
860 print_log()
861 return s
862
863 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
864 """
865 Return a scan where all spectra are converted to either
866 Jansky or Kelvin depending upon the flux units of the scan table.
867 By default the function tries to look the values up internally.
868 If it can't find them (or if you want to over-ride), you must
869 specify EITHER jyperk OR eta (and D which it will try to look up
870 also if you don't set it). jyperk takes precedence if you set both.
871 Parameters:
872 jyperk: the Jy / K conversion factor
873 eta: the aperture efficiency
874 d: the geometric diameter (metres)
875 insitu: if False a new scantable is returned.
876 Otherwise, the scaling is done in-situ
877 The default is taken from .asaprc (False)
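Example:
# convert using an explicit Jy/K factor (the value is illustrative)
newscan = scan.convert_flux(jyperk=12.7, insitu=False)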
881 """
882 if insitu is None: insitu = rcParams['insitu']
883 self._math._setinsitu(insitu)
884 varlist = vars()
885 if jyperk is None: jyperk = -1.0
886 if d is None: d = -1.0
887 if eta is None: eta = -1.0
888 s = scantable(self._math._convertflux(self, d, eta, jyperk))
889 s._add_history("convert_flux", varlist)
890 print_log()
891 if insitu: self._assign(s)
892 else: return s
893
894 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
895 """
896 Return a scan after applying a gain-elevation correction.
897 The correction can be made via either a polynomial or a
898 table-based interpolation (and extrapolation if necessary).
899 You specify polynomial coefficients, an ascii table or neither.
900 If you specify neither, then a polynomial correction will be made
901 with built in coefficients known for certain telescopes (an error
902 will occur if the instrument is not known).
903 The data and Tsys are *divided* by the scaling factors.
904 Parameters:
905 poly: Polynomial coefficients (default None) to compute a
906 gain-elevation correction as a function of
907 elevation (in degrees).
908 filename: The name of an ascii file holding correction factors.
909 The first row of the ascii file must give the column
910 names and these MUST include columns
911 "ELEVATION" (degrees) and "FACTOR" (multiply data
912 by this) somewhere.
913 The second row must give the data type of the
914 column. Use 'R' for Real and 'I' for Integer.
915 An example file would be
916 (actual factors are arbitrary) :
917
918 TIME ELEVATION FACTOR
919 R R R
920 0.1 0 0.8
921 0.2 20 0.85
922 0.3 40 0.9
923 0.4 60 0.85
924 0.5 80 0.8
925 0.6 90 0.75
926 method: Interpolation method when correcting from a table.
927 Values are "nearest", "linear" (default), "cubic"
928 and "spline"
929 insitu: if False a new scantable is returned.
930 Otherwise, the scaling is done in-situ
931 The default is taken from .asaprc (False)
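Example:
# polynomial correction with illustrative coefficients
newscan = scan.gain_el(poly=[0.0001,0.0003,1.3], insitu=False)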
932 """
933
934 if insitu is None: insitu = rcParams['insitu']
935 self._math._setinsitu(insitu)
936 varlist = vars()
937 if poly is None:
938 poly = ()
939 from os.path import expandvars
940 filename = expandvars(filename)
941 s = scantable(self._math._gainel(self, poly, filename, method))
942 s._add_history("gain_el", varlist)
943 print_log()
944 if insitu: self._assign(s)
945 else: return s
946
947 def freq_align(self, reftime=None, method='cubic', insitu=None):
948 """
949 Return a scan where all rows have been aligned in frequency/velocity.
950 The alignment frequency frame (e.g. LSRK) is that set by function
951 set_freqframe.
952 Parameters:
953 reftime: reference time to align at. By default, the time of
954 the first row of data is used.
955 method: Interpolation method for regridding the spectra.
956 Choose from "nearest", "linear", "cubic" (default)
957 and "spline"
958 insitu: if False a new scantable is returned.
959 Otherwise, the scaling is done in-situ
960 The default is taken from .asaprc (False)
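Example:
newscan = scan.freq_align(insitu=False)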
961 """
962 if insitu is None: insitu = rcParams["insitu"]
963 self._math._setinsitu(insitu)
964 varlist = vars()
965 if reftime is None: reftime = ""
966 s = scantable(self._math._freq_align(self, reftime, method))
967 s._add_history("freq_align", varlist)
968 print_log()
969 if insitu: self._assign(s)
970 else: return s
971
972 def opacity(self, tau, insitu=None):
973 """
974 Apply an opacity correction. The data
975 and Tsys are multiplied by the correction factor.
976 Parameters:
977 tau: Opacity from which the correction factor is
978 exp(tau*ZD)
979 where ZD is the zenith-distance
980 insitu: if False a new scantable is returned.
981 Otherwise, the scaling is done in-situ
982 The default is taken from .asaprc (False)
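Example:
# correct for a zenith opacity of 0.08 (the value is illustrative)
newscan = scan.opacity(0.08, insitu=False)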
983 """
984 if insitu is None: insitu = rcParams['insitu']
985 self._math._setinsitu(insitu)
986 varlist = vars()
987 s = scantable(self._math._opacity(self, tau))
988 s._add_history("opacity", varlist)
989 print_log()
990 if insitu: self._assign(s)
991 else: return s
992
993 def bin(self, width=5, insitu=None):
994 """
995 Return a scan where all spectra have been binned up.
996 width: The bin width (default=5) in pixels
997 insitu: if False a new scantable is returned.
998 Otherwise, the scaling is done in-situ
999 The default is taken from .asaprc (False)
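Example:
newscan = scan.bin(width=10, insitu=False) # width is illustrative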
1000 """
1001 if insitu is None: insitu = rcParams['insitu']
1002 self._math._setinsitu(insitu)
1003 varlist = vars()
1004 s = scantable(self._math._bin(self, width))
1005 s._add_history("bin",varlist)
1006 print_log()
1007 if insitu: self._assign(s)
1008 else: return s
1009
1010
1011 def resample(self, width=5, method='cubic', insitu=None):
1012 """
1013 Return a scan where all spectra have been resampled onto a new grid.
1014 width: The bin width (default=5) in pixels
1015 method: Interpolation method used for the resampling.
1016 Values are "nearest", "linear", "cubic" (default)
1017 and "spline"
1018 insitu: if False a new scantable is returned.
1019 Otherwise, the scaling is done in-situ
1020 The default is taken from .asaprc (False)
1021 """
1022 if insitu is None: insitu = rcParams['insitu']
1023 self._math._setinsitu(insitu)
1024 varlist = vars()
1025 s = scantable(self._math._resample(self, method, width))
1026 s._add_history("resample",varlist)
1027 print_log()
1028 if insitu: self._assign(s)
1029 else: return s
1030
1031
1032 def average_pol(self, mask=None, weight='none'):
1033 """
1034 Average the Polarisations together.
1035 Parameters:
1036 mask: An optional mask defining the region, where the
1037 averaging will be applied. The output will have all
1038 specified points masked.
1039 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1040 weighted), or 'tsys' (1/Tsys**2 weighted)
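Example:
# average the polarisations with 1/Tsys**2 weighting
newscan = scan.average_pol(weight='tsys')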
1041 """
1042 varlist = vars()
1043 if mask is None:
1044 mask = ()
1045 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1046 s._add_history("average_pol",varlist)
1047 print_log()
1048 return s
1049
1050 def convert_pol(self, poltype=None):
1051 """
1052 Convert the data to a different polarisation type.
1053 Parameters:
1054 poltype: The new polarisation type. Valid types are:
1055 "linear", "stokes" and "circular"
1056 """
1057 varlist = vars()
1058 try:
1059 s = scantable(self._math._convertpol(self, poltype))
1060 except RuntimeError,msg:
1061 if rcParams['verbose']:
1062 print msg
1063 return
1064 else:
1065 raise
1066 s._add_history("convert_pol",varlist)
1067 print_log()
1068 return s
1069
1070 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1071 """
1072 Smooth the spectrum by the specified kernel (conserving flux).
1073 Parameters:
1074 scan: The input scan
1075 kernel: The type of smoothing kernel. Select from
1076 'hanning' (default), 'gaussian' and 'boxcar'.
1077 The first three characters are sufficient.
1078 width: The width of the kernel in pixels. For hanning this is
1079 ignored, otherwise it defaults to 5 pixels.
1080 For 'gaussian' it is the Full Width Half
1081 Maximum. For 'boxcar' it is the full width.
1082 insitu: if False a new scantable is returned.
1083 Otherwise, the scaling is done in-situ
1084 The default is taken from .asaprc (False)
1085 Example:
1086 none
1087 """
1088 if insitu is None: insitu = rcParams['insitu']
1089 self._math._setinsitu(insitu)
1090 varlist = vars()
1091 s = scantable(self._math._smooth(self,kernel.lower(),width))
1092 s._add_history("smooth", varlist)
1093 print_log()
1094 if insitu: self._assign(s)
1095 else: return s
1096
1097
1098 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1099 """
1100 Return a scan which has been baselined (all rows) by a polynomial.
1101 Parameters:
1102 scan: a scantable
1103 mask: an optional mask
1104 order: the order of the polynomial (default is 0)
1105 plot: plot the fit and the residual. In this case each
1106 individual fit has to be approved, by typing 'y'
1107 or 'n'
1108 insitu: if False a new scantable is returned.
1109 Otherwise, the scaling is done in-situ
1110 The default is taken from .asaprc (False)
1111 Example:
1112 # return a scan baselined by a third order polynomial,
1113 # not using a mask
1114 bscan = scan.poly_baseline(order=3)
1115 """
1116 if insitu is None: insitu = rcParams['insitu']
1117 varlist = vars()
1118 if mask is None:
1119 from numarray import ones
1120 mask = list(ones(self.nchan(-1)))
1121 from asap.asapfitter import fitter
1122 f = fitter()
1123 f.set_scan(self, mask)
1124 f.set_function(poly=order)
1125 s = f.auto_fit(insitu, plot=plot)
1126 s._add_history("poly_baseline", varlist)
1127 print_log()
1128 if insitu: self._assign(s)
1129 else: return s
1130
1131 def auto_poly_baseline(self, mask=[], edge=(0,0), order=0,
1132 threshold=3, plot=False, insitu=None):
1133 """
1134 Return a scan which has been baselined (all rows) by a polynomial.
1135 Spectral lines are detected first using linefinder and masked out
1136 to avoid them affecting the baseline solution.
1137
1138 Parameters:
1139 mask: an optional mask retrieved from scantable
1140 edge: an optional number of channels to drop at
1141 the edge of spectrum. If only one value is
1142 specified, the same number will be dropped from
1143 both sides of the spectrum. Default is to keep
1144 all channels. Nested tuples represent individual
1145 edge selection for different IFs (a number of spectral
1146 channels can be different)
1147 order: the order of the polynomial (default is 0)
1148 threshold: the threshold used by line finder. It is better to
1149 keep it large as only strong lines affect the
1150 baseline solution.
1151 plot: plot the fit and the residual. In this case each
1152 individual fit has to be approved, by typing 'y'
1153 or 'n'
1154 insitu: if False a new scantable is returned.
1155 Otherwise, the scaling is done in-situ
1156 The default is taken from .asaprc (False)
1157
1158 Example:
1159 scan2=scan.auto_poly_baseline(order=7)
1160 """
1161 if insitu is None: insitu = rcParams['insitu']
1162 varlist = vars()
1163 from asap.asapfitter import fitter
1164 from asap.asaplinefind import linefinder
1165 from asap import _is_sequence_or_number as _is_valid
1166
1167 # check whether edge is set up for each IF individually
1168 individualEdge = False;
1169 if len(edge)>1:
1170 if isinstance(edge[0],list) or isinstance(edge[0],tuple):
1171 individualEdge = True;
1172
1173 if not _is_valid(edge, int) and not individualEdge:
1174 raise ValueError, "Parameter 'edge' has to be an integer or a \
1175 pair of integers specified as a tuple. Nested tuples are allowed \
1176 to make individual selection for different IFs."
1177
1178 curedge = (0,0)
1179 if individualEdge:
1180 for edge_par in edge:
1181 if not _is_valid(edge_par, int):
1182 raise ValueError, "Each element of the 'edge' tuple has \
1183 to be a pair of integers or an integer."
1184 else:
1185 curedge = edge;
1186
1187 # setup fitter
1188 f = fitter()
1189 f.set_function(poly=order)
1190
1191 # setup line finder
1192 fl=linefinder()
1193 fl.set_options(threshold=threshold)
1194
1195 if not insitu:
1196 workscan=self.copy()
1197 else:
1198 workscan=self
1199
1200 fl.set_scan(workscan)
1201
1202 rows=range(workscan.nrow())
1203 from asap import asaplog
1204 asaplog.push("Processing:")
1205 for r in rows:
1206 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % (workscan.getscan(r),workscan.getbeam(r),workscan.getif(r),workscan.getpol(r), workscan.getcycle(r))
1207 asaplog.push(msg, False)
1208
1209 # figure out edge parameter
1210 if individualEdge:
1211 if len(edge) <= workscan.getif(r):
1212 raise RuntimeError, "Number of edge elements appears to be less than the number of IFs"
1213 curedge = edge[workscan.getif(r)]
1214
1215 # setup line finder
1216 fl.find_lines(r,mask,curedge)
1217 f.set_scan(workscan, fl.get_mask())
1218 f.x = workscan._getabcissa(r)
1219 f.y = workscan._getspectrum(r)
1220 f.data = None
1221 f.fit()
1222 x = f.get_parameters()
1223 if plot:
1224 f.plot(residual=True)
1225 x = raw_input("Accept fit ( [y]/n ): ")
1226 if x.upper() == 'N':
1227 continue
1228 workscan._setspectrum(f.fitter.getresidual(), r)
1229 if plot:
1230 f._p.unmap()
1231 f._p = None
1232 workscan._add_history("auto_poly_baseline", varlist)
1233 if insitu:
1234 self._assign(workscan)
1235 else:
1236 return workscan
1237
1238 def rotate_linpolphase(self, angle):
1239 """
1240 Rotate the phase of the complex polarization O=Q+iU correlation.
1241 This is always done in situ in the raw data. So if you call this
1242 function more than once then each call rotates the phase further.
1243 Parameters:
1244 angle: The angle (degrees) to rotate (add) by.
1245 Examples:
1246 scan.rotate_linpolphase(2.3)
1247 """
1248 varlist = vars()
1249 self._math._rotate_linpolphase(self, angle)
1250 self._add_history("rotate_linpolphase", varlist)
1251 print_log()
1252 return
1253
1254
1255 def rotate_xyphase(self, angle):
1256 """
1257 Rotate the phase of the XY correlation. This is always done in situ
1258 in the data. So if you call this function more than once
1259 then each call rotates the phase further.
1260 Parameters:
1261 angle: The angle (degrees) to rotate (add) by.
1262 Examples:
1263 scan.rotate_xyphase(2.3)
1264 """
1265 varlist = vars()
1266 self._math._rotate_xyphase(self, angle)
1267 self._add_history("rotate_xyphase", varlist)
1268 print_log()
1269 return
1270
1271 def swap_linears(self):
1272 """
1273 Swap the linear polarisations XX and YY
1274 """
1275 varlist = vars()
1276 self._math._swap_linears(self)
1277 self._add_history("swap_linears", varlist)
1278 print_log()
1279 return
1280
1281 def invert_phase(self):
1282 """
1283 Invert the phase of the complex polarisation
1284 """
1285 varlist = vars()
1286 self._math._invert_phase(self)
1287 self._add_history("invert_phase", varlist)
1288 print_log()
1289 return
1290
1291 def add(self, offset, insitu=None):
1292 """
1293 Return a scan where all spectra have the offset added
1294 Parameters:
1295 offset: the offset
1296 insitu: if False a new scantable is returned.
1297 Otherwise, the scaling is done in-situ
1298 The default is taken from .asaprc (False)
1299 """
1300 if insitu is None: insitu = rcParams['insitu']
1301 self._math._setinsitu(insitu)
1302 varlist = vars()
1303 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1304 s._add_history("add",varlist)
1305 print_log()
1306 if insitu:
1307 self._assign(s)
1308 else:
1309 return s
1310
1311 def scale(self, factor, tsys=True, insitu=None):
1312 """
1313 Return a scan where all spectra are scaled by the given 'factor'
1314 Parameters:
1315 factor: the scaling factor
1316 insitu: if False a new scantable is returned.
1317 Otherwise, the scaling is done in-situ
1318 The default is taken from .asaprc (False)
1319 tsys: if True (default) then apply the operation to Tsys
1320 as well as the data
1321 """
1322 if insitu is None: insitu = rcParams['insitu']
1323 self._math._setinsitu(insitu)
1324 varlist = vars()
1325 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1326 s._add_history("scale",varlist)
1327 print_log()
1328 if insitu:
1329 self._assign(s)
1330 else:
1331 return s
1332
1333 def auto_quotient(self, mode='time', preserve=True):
1334 """
1335 This function allows one to build quotients automatically.
1336 It assumes the observation to have the same number of
1337 "ons" and "offs"
1338 It will support "closest off in time" in the future
1339 Parameters:
1340 mode: the on/off detection mode; 'time' (default)
1341 'suffix' identifies 'off' scans by the
1342 trailing '_R' (Mopra/Parkes) or
1343 '_e'/'_w' (Tid)
1344 preserve: you can preserve (default) the continuum or
1345 remove it. The equations used are
1346 preserve: Output = Toff * (on/off) - Toff
1347 remove: Output = Toff * (on/off) - Ton
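Example:
q = scan.auto_quotient(preserve=True)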
1348 """
1349 modes = ["time"]
1350 if not mode in modes:
1351 msg = "please provide valid mode. Valid modes are %s" % (modes)
1352 raise ValueError(msg)
1353 varlist = vars()
1354 s = scantable(self._math._auto_quotient(self, mode, preserve))
1355 s._add_history("auto_quotient",varlist)
1356 print_log()
1357 return s
1358
1359
1360
1361
1362 def freq_switch(self, insitu=None):
1363 """
1364 Apply frequency switching to the data.
1365 Parameters:
1366 insitu: if False a new scantable is returned.
1367 Otherwise, the switching is done in-situ
1368 The default is taken from .asaprc (False)
1369 Example:
1370 none
1371 """
1372 if insitu is None: insitu = rcParams['insitu']
1373 self._math._setinsitu(insitu)
1374 varlist = vars()
1375 s = scantable(self._math._freqswitch(self))
1376 s._add_history("freq_switch",varlist)
1377 print_log()
1378 if insitu: self._assign(s)
1379 else: return s
1380
1381 def recalc_azel(self):
1382 """
1383 Recalculate the azimuth and elevation for each position.
1384 Parameters:
1385 none
1386 Example:
1387 """
1388 varlist = vars()
1389 self._recalcazel()
1390 self._add_history("recalc_azel", varlist)
1391 print_log()
1392 return
1393
1394 def __add__(self, other):
1395 varlist = vars()
1396 s = None
1397 if isinstance(other, scantable):
1398 print "scantable + scantable NYI"
1399 return
1400 elif isinstance(other, float):
1401 s = scantable(self._math._unaryop(self, other, "ADD", False))
1402 else:
1403 raise TypeError("Other input is not a scantable or float value")
1404 s._add_history("operator +", varlist)
1405 print_log()
1406 return s
1407
1408 def __sub__(self, other):
1409 """
1410 implicit on all axes and on Tsys
1411 """
1412 varlist = vars()
1413 s = None
1414 if isinstance(other, scantable):
1415 print "scantable - scantable NYI"
1416 return
1417 elif isinstance(other, float):
1418 s = scantable(self._math._unaryop(self, other, "SUB", False))
1419 else:
1420 raise TypeError("Other input is not a scantable or float value")
1421 s._add_history("operator -", varlist)
1422 print_log()
1423 return s
1424
1425 def __mul__(self, other):
1426 """
1427 implicit on all axes and on Tsys
1428 """
1429 varlist = vars()
1430 s = None
1431 if isinstance(other, scantable):
1432 print "scantable * scantable NYI"
1433 return
1434 elif isinstance(other, float):
1435 s = scantable(self._math._unaryop(self, other, "MUL", False))
1436 else:
1437 raise TypeError("Other input is not a scantable or float value")
1438 s._add_history("operator *", varlist)
1439 print_log()
1440 return s
1441
1442
1443 def __div__(self, other):
1444 """
1445 implicit on all axes and on Tsys
1446 """
1447 varlist = vars()
1448 s = None
1449 if isinstance(other, scantable):
1450 print "scantable / scantable NYI"
1451 return
1452 elif isinstance(other, float):
1453 if other == 0.0:
1454 raise ZeroDivisionError("Dividing by zero is not recommended")
1455 s = scantable(self._math._unaryop(self, other, "DIV", False))
1456 else:
1457 raise TypeError("Other input is not a scantable or float value")
1458 s._add_history("operator /", varlist)
1459 print_log()
1460 return s
1461
1462 def get_fit(self, row=0):
1463 """
1464 Print or return the stored fits for a row in the scantable
1465 Parameters:
1466 row: the row which the fit has been applied to.
1467 """
1468 if row >= self.nrow():
1469 return
1470 from asap.asapfit import asapfit
1471 fit = asapfit(self._getfit(row))
1472 if rcParams['verbose']:
1473 print fit
1474 return
1475 else:
1476 return fit.as_dict()
1477
1478 def _add_history(self, funcname, parameters):
1479 # create date
1480 sep = "##"
1481 from datetime import datetime
1482 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1483 hist = dstr+sep
1484 hist += funcname+sep#cdate+sep
1485 if parameters.has_key('self'): del parameters['self']
1486 for k,v in parameters.iteritems():
1487 if type(v) is dict:
1488 for k2,v2 in v.iteritems():
1489 hist += k2
1490 hist += "="
1491 if isinstance(v2,scantable):
1492 hist += 'scantable'
1493 elif k2 == 'mask':
1494 if isinstance(v2,list) or isinstance(v2,tuple):
1495 hist += str(self._zip_mask(v2))
1496 else:
1497 hist += str(v2)
1498 else:
1499 hist += str(v2)
1500 else:
1501 hist += k
1502 hist += "="
1503 if isinstance(v,scantable):
1504 hist += 'scantable'
1505 elif k == 'mask':
1506 if isinstance(v,list) or isinstance(v,tuple):
1507 hist += str(self._zip_mask(v))
1508 else:
1509 hist += str(v)
1510 else:
1511 hist += str(v)
1512 hist += sep
1513 hist = hist[:-2] # remove trailing '##'
1514 self._addhistory(hist)
1515
1516
1517 def _zip_mask(self, mask):
1518 mask = list(mask)
1519 i = 0
1520 segments = []
1521 while mask[i:].count(1):
1522 i += mask[i:].index(1)
1523 if mask[i:].count(0):
1524 j = i + mask[i:].index(0)
1525 else:
1526 j = len(mask)
1527 segments.append([i,j])
1528 i = j
1529 return segments
1530
1531 def _get_ordinate_label(self):
1532 fu = "("+self.get_fluxunit()+")"
1533 import re
1534 lbl = "Intensity"
1535 if re.match(".K.",fu):
1536 lbl = "Brightness Temperature "+ fu
1537 elif re.match(".Jy.",fu):
1538 lbl = "Flux density "+ fu
1539 return lbl
1540
1541 def _check_ifs(self):
1542 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1543 nchans = filter(lambda t: t > 0, nchans)
1544 return (sum(nchans)/len(nchans) == nchans[0])
1545
1546 def _fill(self, names, unit, average):
1547 import os
1548 varlist = vars()
1549 from asap._asap import stfiller
1550 first = True
1551 fullnames = []
1552 for name in names:
1553 name = os.path.expandvars(name)
1554 name = os.path.expanduser(name)
1555 if not os.path.exists(name):
1556 msg = "File '%s' does not exists" % (name)
1557 if rcParams['verbose']:
1558 asaplog.push(msg)
1559 print asaplog.pop().strip()
1560 return
1561 raise IOError(msg)
1562 fullnames.append(name)
1563 if average:
1564 asaplog.push('Auto averaging integrations')
1565 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1566 for name in fullnames:
1567 tbl = Scantable(stype)
1568 r = stfiller(tbl)
1569 msg = "Importing %s..." % (name)
1570 asaplog.push(msg,False)
1571 print_log()
1572 r._open(name,-1,-1)
1573 r._read()
1574 #tbl = r._getdata()
1575 if average:
1576 tbl = self._math._average((tbl,),(),'NONE','SCAN')
1577 #tbl = tbl2
1578 if not first:
1579 tbl = self._math._merge([self, tbl])
1580 #tbl = tbl2
1581 Scantable.__init__(self, tbl)
1582 r._close()
1583 del r,tbl
1584 first = False
1585 if unit is not None:
1586 self.set_fluxunit(unit)
1587 self.set_freqframe(rcParams['scantable.freqframe'])
1588 #self._add_history("scantable", varlist)
1589