source: trunk/python/scantable.py@ 1186

Last change on this file since 1186 was 1175, checked in by mar637, 18 years ago

various changes to support the pylab plotter 'xyplotter'

1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Overrides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 #varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 print_log()
75
76 def save(self, name=None, format=None, overwrite=False):
77 """
78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
79 Image FITS or MS2 format.
80 Parameters:
81 name: the name of the output file. For format "ASCII"
82 this is the root file name (data in 'name'.txt
83 and header in 'name'_header.txt)
84 format: an optional file format. Default is ASAP.
85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
86 'SDFITS' (save as SDFITS file)
87 'ASCII' (saves as ascii text file)
88 'MS2' (saves as an aips++
89 MeasurementSet V2)
90 overwrite: if True, overwrite the file if it exists.
91 The default (False) is to return with a warning
92 without writing the output. USE WITH CARE.
93 Example:
94 scan.save('myscan.asap')
95 scan.save('myscan.sdfits', 'SDFITS')
96 """
97 from os import path
98 if format is None: format = rcParams['scantable.save']
99 suffix = '.'+format.lower()
100 if name is None or name == "":
101 name = 'scantable'+suffix
102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
104 name = path.expandvars(name)
105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
113 format2 = format.upper()
114 if format2 == 'ASAP':
115 self._save(name)
116 else:
117 from asap._asap import stwriter as stw
118 writer = stw(format2)
119 writer.write(self, name)
120 print_log()
121 return
122
123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
127 none
128 Example:
129 copiedscan = scan.copy()
130 """
131 sd = scantable(Scantable._copy(self))
132 return sd
133
134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) have
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
174
175
176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
183 '*_R' gets all 'ref' scans
184 Example:
185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
189 # get a subset of scans by scanno (as listed in scan.summary())
190 newscan = scan.get_scan([0, 2, 7, 10])
191 """
192 if scanid is None:
193 if rcParams['verbose']:
194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
196 return
197 else:
198 raise RuntimeError("No scan given")
199
200 try:
201 bsel = self.get_selection()
202 sel = selector()
203 if type(scanid) is str:
204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
206 scopy = self._copy()
207 self.set_selection(bsel)
208 return scantable(scopy)
209 elif type(scanid) is int:
210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
212 scopy = self._copy()
213 self.set_selection(bsel)
214 return scantable(scopy)
215 elif type(scanid) is list:
216 sel.set_scans(scanid)
217 self.set_selection(sel)
218 scopy = self._copy()
219 self.set_selection(bsel)
220 return scantable(scopy)
221 else:
222 msg = "Illegal scanid type, use 'int' or 'list' if ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
227 except RuntimeError:
228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
230
231 def __str__(self):
232 return Scantable._summary(self, True)
233
234 def summary(self, filename=None):
235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
238 filename: the name of a file to write the output to
239 Default - no file output
240 verbose: print extra info such as the frequency table
241 The default (False) is taken from .asaprc
242 """
243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
245 if filename is not None:
246 if filename == "":
247 filename = 'scantable_summary.txt'
248 from os.path import expandvars, isdir
249 filename = expandvars(filename)
250 if not isdir(filename):
251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
266 else:
267 return info
268
269
270 def get_selection(self):
271 """
272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
279 """
280 return selector(self._getselection())
281
282 def set_selection(self, selection=selector()):
283 """
284 Select a subset of the data. All following operations on this scantable
285 are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
290 sel.set_scans([0, 3]) # select SCANNO 0 and 3
291 scan.set_selection(sel) # set the selection
292 scan.summary() # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
294 """
295 self._setselection(selection)
296
297 def stats(self, stat='stddev', mask=None):
298 """
299 Determine the specified statistic of the current beam/if/pol
300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
303 stat: 'min', 'max', 'sumsq', 'sum', 'mean',
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
306 should be determined.
307 Example:
308 scan.set_unit('channel')
309 msk = scan.create_mask([100, 200], [500, 600])
310 scan.stats(stat='mean', mask=msk)
311 """
312 if mask is None:
313 mask = []
314 axes = ['Beam', 'IF', 'Pol', 'Time']
315 if not self._check_ifs():
316 raise ValueError("Cannot apply mask as the IFs have different "
317 "number of channels. Please use setselection() "
318 "to select individual IFs")
319
320 statvals = self._math._stats(self, mask, stat)
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
340
341 if rcParams['verbose']:
342 print "--------------------------------------------------"
343 print " ", stat
344 print "--------------------------------------------------"
345 print out
346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
347 'axes' : axes,
348 'data': statvals}
349 return retval
350
351 def stddev(self, mask=None):
352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
362 msk = scan.create_mask([100, 200], [500, 600])
363 scan.stddev(mask=msk)
364 """
365 return self.stats(stat='stddev', mask=mask)
366
367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
374 def get_tsys(self):
375 """
376 Return the System temperatures.
377 Parameters:
378
379 Returns:
380 a list of Tsys values for the current selection
381 """
382
383 return self._row_callback(self._gettsys, "Tsys")
384
385 def _row_callback(self, callback, label):
386 axes = []
387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
388 out = ""
389 outvec = []
390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
413 # disabled because the vector seems more useful
414 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415 return outvec
416
417 def _get_column(self, callback, row=-1):
418 """
419 """
420 if row == -1:
421 return [callback(i) for i in range(self.nrow())]
422 else:
423 if 0 <= row < self.nrow():
424 return callback(row)
425
426
427 def get_time(self, row=-1):
428 """
429 Get a list of time stamps for the observations.
430 Return a string for each integration in the scantable.
431 Parameters:
432 row: row no of integration. Default -1 return all rows
433 Example:
434 none
435 """
436 from time import strptime
437 from datetime import datetime
438 times = self._get_column(self._gettime, row)
439 format = "%Y/%m/%d/%H:%M:%S"
440 if isinstance(times, list):
441 return [datetime(*strptime(i, format)[:6]) for i in times]
442 else:
443 return datetime(*strptime(times, format)[:6])
444
445 def get_sourcename(self, row=-1):
446 """
447 Get a list of source names for the observations.
448 Return a string for each integration in the scantable.
449 Parameters:
450 row: row no of integration. Default -1 return all rows
451 Example:
452 none
453 """
454 return self._get_column(self._getsourcename, row)
455
456 def get_elevation(self, row=-1):
457 """
458 Get a list of elevations for the observations.
459 Return a float for each integration in the scantable.
460 Parameters:
461 row: row no of integration. Default -1 return all rows
462 Example:
463 none
464 """
465 return self._get_column(self._getelevation, row)
466
467 def get_azimuth(self, row=-1):
468 """
469 Get a list of azimuths for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 return all rows
473 Example:
474 none
475 """
476 return self._get_column(self._getazimuth, row)
477
478 def get_parangle(self, row=-1):
479 """
480 Get a list of parallactic angles for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row no of integration. Default -1 return all rows
484 Example:
485 none
486 """
487 return self._get_column(self._getparangle, row)
488
489 def get_direction(self, row=-1):
490 """
491 Get a list of Positions on the sky (direction) for the observations.
492 Return a float for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 return all rows
495 Example:
496 none
497 """
498 return self._get_column(self._getdirection, row)
499
500 def set_unit(self, unit='channel'):
501 """
502 Set the unit for all following operations on this scantable
503 Parameters:
504 unit: optional unit, default is 'channel'
505 one of '*Hz', 'km/s', 'channel', ''
506 """
507 varlist = vars()
508 if unit in ['', 'pixel', 'channel']:
509 unit = ''
510 inf = list(self._getcoordinfo())
511 inf[0] = unit
512 self._setcoordinfo(inf)
513 self._add_history("set_unit", varlist)
514
515 def set_instrument(self, instr):
516 """
517 Set the instrument for subsequent processing
518 Parameters:
519 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
520 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
521 """
522 self._setInstrument(instr)
523 self._add_history("set_instrument", vars())
524 print_log()
525
526 def set_doppler(self, doppler='RADIO'):
527 """
528 Set the doppler for all following operations on this scantable.
529 Parameters:
530 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
531 """
532 varlist = vars()
533 inf = list(self._getcoordinfo())
534 inf[2] = doppler
535 self._setcoordinfo(inf)
536 self._add_history("set_doppler", vars())
537 print_log()
538
539 def set_freqframe(self, frame=None):
540 """
541 Set the frame type of the Spectral Axis.
542 Parameters:
543 frame: an optional frame type, default 'LSRK'. Valid frames are:
544 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
545 'GEO', 'GALACTO', 'LGROUP', 'CMB'
546 Examples:
547 scan.set_freqframe('BARY')
548 """
549 if frame is None: frame = rcParams['scantable.freqframe']
550 varlist = vars()
551 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
552 'GEO', 'GALACTO', 'LGROUP', 'CMB']
553
554 if frame in valid:
555 inf = list(self._getcoordinfo())
556 inf[1] = frame
557 self._setcoordinfo(inf)
558 self._add_history("set_freqframe", varlist)
559 else:
560 msg = "Please specify a valid freq type. Valid types are:\n", valid
561 if rcParams['verbose']:
562 print msg
563 else:
564 raise TypeError(msg)
565 print_log()
566
567 def set_dirframe(self, frame=""):
568 """
569 Set the frame type of the Direction on the sky.
570 Parameters:
571 frame: an optional frame type, default ''. Valid frames are:
572 'J2000', 'B1950', 'GALACTIC'
573 Examples:
574 scan.set_dirframe('GALACTIC')
575 """
576 varlist = vars()
577 try:
578 Scantable.set_dirframe(self, frame)
579 except RuntimeError, msg:
580 if rcParams['verbose']:
581 print msg
582 else:
583 raise
584 self._add_history("set_dirframe", varlist)
585
586 def get_unit(self):
587 """
588 Get the default unit set in this scantable
589 Parameters:
590 Returns:
591 A unit string
592 """
593 inf = self._getcoordinfo()
594 unit = inf[0]
595 if unit == '': unit = 'channel'
596 return unit
597
598 def get_abcissa(self, rowno=0):
599 """
600 Get the abcissa in the current coordinate setup for the currently
601 selected Beam/IF/Pol
602 Parameters:
603 rowno: an optional row number in the scantable. Default is the
604 first row, i.e. rowno=0
605 Returns:
606 The abcissa values and its format string (as a dictionary)
607 """
608 abc = self._getabcissa(rowno)
609 lbl = self._getabcissalabel(rowno)
610 print_log()
611 return abc, lbl
612
613 def flag(self, mask=None):
614 """
615 Flag the selected data using an optional channel mask.
616 Parameters:
617 mask: an optional channel mask, created with create_mask. Default
618 (no mask) is all channels.
619 """
620 varlist = vars()
621 if mask is None:
622 mask = []
623 try:
624 self._flag(mask)
625 except RuntimeError, msg:
626 if rcParams['verbose']:
627 print msg
628 return
629 else: raise
630 self._add_history("flag", varlist)
631
632
633 def create_mask(self, *args, **kwargs):
634 """
635 Compute and return a mask based on [min, max] windows.
636 The specified windows are to be INCLUDED, when the mask is
637 applied.
638 Parameters:
639 [min, max], [min2, max2], ...
640 Pairs of start/end points (inclusive) specifying the regions
641 to be masked
642 invert: optional argument. If specified as True,
643 return an inverted mask, i.e. the regions
644 specified are EXCLUDED
645 row: create the mask using the specified row for
646 unit conversions, default is row=0
647 only necessary if frequency varies over rows.
648 Example:
649 scan.set_unit('channel')
650 a)
651 msk = scan.create_mask([400, 500], [800, 900])
652 # masks everything outside 400 and 500
653 # and 800 and 900 in the unit 'channel'
654
655 b)
656 msk = scan.create_mask([400, 500], [800, 900], invert=True)
657 # masks the regions between 400 and 500
658 # and 800 and 900 in the unit 'channel'
659 c)
660 mask only channel 400
661 msk = scan.create_mask([400, 400])
662 """
663 row = 0
664 if kwargs.has_key("row"):
665 row = kwargs.get("row")
666 data = self._getabcissa(row)
667 u = self._getcoordinfo()[0]
668 if rcParams['verbose']:
669 if u == "": u = "channel"
670 msg = "The current mask window unit is %s" % u
671 i = self._check_ifs()
672 if not i:
673 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
674 asaplog.push(msg)
675 n = self.nchan()
676 msk = NUM.zeros(n)
677 # test if args is a 'list' or a 'normal' *args - UGLY!!!
678
679 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
680 and args or args[0]
681 for window in ws:
682 if (len(window) != 2 or window[0] > window[1] ):
683 raise TypeError("A window needs to be defined as [min, max]")
684 for i in range(n):
685 if data[i] >= window[0] and data[i] <= window[1]:
686 msk[i] = 1
687 if kwargs.has_key('invert'):
688 if kwargs.get('invert'):
689 msk = NUM.logical_not(msk)
690 print_log()
691 return msk
692
693 def get_restfreqs(self):
694 """
695 Get the restfrequency(s) stored in this scantable.
696 The return value(s) are always of unit 'Hz'
697 Parameters:
698 none
699 Returns:
700 a list of doubles
701 """
702 return list(self._getrestfreqs())
703
704
705 def set_restfreqs(self, freqs=None, unit='Hz'):
706 """
707 Set or replace the rest frequency/frequencies.
708 If the 'freqs' argument holds a scalar,
709 then that rest frequency will be applied to all the selected
710 data. If the 'freqs' argument holds
711 a vector, then it MUST be of equal or smaller length than
712 the number of IFs (and the available restfrequencies will be
713 replaced by this vector). In this case, *all* data have
714 the restfrequency set per IF according
715 to the corresponding value you give in the 'freqs' vector.
716 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
717 IF 1 gets restfreq 2e9.
718 You can also specify the frequencies via a linecatalog.
719
720 Parameters:
721 freqs: list of rest frequency values or string identifiers
722 unit: unit for rest frequency (default 'Hz')
723
724 Example:
725 # set the given restfrequency for the whole table
726 scan.set_restfreqs(freqs=1.4e9)
727 # If the number of IFs in the data is >= 2, IF0 gets the first
728 # value IF1 the second...
729 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
730 #set the given restfrequency for the whole table (by name)
731 scan.set_restfreqs(freqs="OH1667")
732
733 Note:
734 To do more sophisticated rest-frequency setting, e.g. on a
735 source and IF basis, use scantable.set_selection() before using
736 this function.
737 # provided your scantable is called scan
738 selection = selector()
739 selection.set_name("ORION*")
740 selection.set_ifs([1])
741 scan.set_selection(selection)
742 scan.set_restfreqs(freqs=86.6e9)
743
744 """
745 varlist = vars()
746 from asap import linecatalog
747 # simple value
748 if isinstance(freqs, int) or isinstance(freqs, float):
749 self._setrestfreqs(freqs, "",unit)
750 # list of values
751 elif isinstance(freqs, list) or isinstance(freqs, tuple):
752 # list values are scalars
753 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
754 sel = selector()
755 savesel = self._getselection()
756 for i in xrange(len(freqs)):
757 sel.set_ifs([i])
758 self._setselection(sel)
759 self._setrestfreqs(freqs[i], "",unit)
760 self._setselection(savesel)
761 # list values are dicts, {'value': ..., 'name': ...}
762 elif isinstance(freqs[-1], dict):
763 sel = selector()
764 savesel = self._getselection()
765 for i in xrange(len(freqs)):
766 sel.set_ifs([i])
767 self._setrestfreqs(freqs[i]["value"],
768 freqs[i]["name"], "MHz")
769 self._setselection(sel)
770 self._setselection(savesel)
771 # freqs are to be taken from a linecatalog
772 elif isinstance(freqs, linecatalog):
773 sel = selector()
774 savesel = self._getselection()
775 for i in xrange(freqs.nrow()):
776 sel.set_ifs([i])
777 self._setselection(sel)
778 self._setrestfreqs(freqs.get_frequency(i),
779 freqs.get_name(i), "MHz")
780 # ensure that we are not iterating past nIF
781 if i == self.nif()-1: break
782 self._setselection(savesel)
783 else:
784 return
785 self._add_history("set_restfreqs", varlist)
786
787
788
789 def history(self):
790 hist = list(self._gethistory())
791 out = "-"*80
792 for h in hist:
793 if h.startswith("---"):
794 out += "\n"+h
795 else:
796 items = h.split("##")
797 date = items[0]
798 func = items[1]
799 items = items[2:]
800 out += "\n"+date+"\n"
801 out += "Function: %s\n Parameters:" % (func)
802 for i in items:
803 s = i.split("=")
804 out += "\n %s = %s" % (s[0], s[1])
805 out += "\n"+"-"*80
806 try:
807 from IPython.genutils import page as pager
808 except ImportError:
809 from pydoc import pager
810 pager(out)
811 return
812
813 #
814 # Maths business
815 #
816
817 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
818 """
819 Return the (time) weighted average of a scan.
820 Note:
821 in channels only - align if necessary
822 Parameters:
823 mask: an optional mask (only used for 'var' and 'tsys'
824 weighting)
825 scanav: True averages each scan separately
826 False (default) averages all scans together,
827 weight: Weighting scheme.
828 'none' (mean no weight)
829 'var' (1/var(spec) weighted)
830 'tsys' (1/Tsys**2 weighted)
831 'tint' (integration time weighted)
832 'tintsys' (Tint/Tsys**2)
833 'median' ( median averaging)
834 The default is 'tint'
835 align: align the spectra in velocity before averaging. It takes
836 the time of the first spectrum as reference time.
837 Example:
838 # time average the scantable without using a mask
839 newscan = scan.average_time()
840 """
841 varlist = vars()
842 if weight is None: weight = 'TINT'
843 if mask is None: mask = ()
844 if scanav: scanav = "SCAN"
845 else: scanav = "NONE"
846 scan = (self, )
847 try:
848 if align:
849 scan = (self.freq_align(insitu=False), )
850 s = None
851 if weight.upper() == 'MEDIAN':
852 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
853 scanav))
854 else:
855 s = scantable(self._math._average(scan, mask, weight.upper(),
856 scanav))
857 except RuntimeError, msg:
858 if rcParams['verbose']:
859 print msg
860 return
861 else: raise
862 s._add_history("average_time", varlist)
863 print_log()
864 return s
865
866 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
867 """
868 Return a scan where all spectra are converted to either
869 Jansky or Kelvin depending upon the flux units of the scan table.
870 By default the function tries to look the values up internally.
871 If it can't find them (or if you want to over-ride), you must
872 specify EITHER jyperk OR eta (and D which it will try to look up
873 also if you don't set it). jyperk takes precedence if you set both.
874 Parameters:
875 jyperk: the Jy / K conversion factor
876 eta: the aperture efficiency
877 d: the geometric diameter (metres)
878 insitu: if False a new scantable is returned.
879 Otherwise, the scaling is done in-situ
880 The default is taken from .asaprc (False)
881 allaxes: if True apply to all spectra. Otherwise
882 apply only to the selected (beam/pol/if) spectra
883 The default is taken from .asaprc (True if none)
884 """
885 if insitu is None: insitu = rcParams['insitu']
886 self._math._setinsitu(insitu)
887 varlist = vars()
888 if jyperk is None: jyperk = -1.0
889 if d is None: d = -1.0
890 if eta is None: eta = -1.0
891 s = scantable(self._math._convertflux(self, d, eta, jyperk))
892 s._add_history("convert_flux", varlist)
893 print_log()
894 if insitu: self._assign(s)
895 else: return s
896
897 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
898 """
899 Return a scan after applying a gain-elevation correction.
900 The correction can be made via either a polynomial or a
901 table-based interpolation (and extrapolation if necessary).
902 You specify polynomial coefficients, an ascii table or neither.
903 If you specify neither, then a polynomial correction will be made
904 with built in coefficients known for certain telescopes (an error
905 will occur if the instrument is not known).
906 The data and Tsys are *divided* by the scaling factors.
907 Parameters:
908 poly: Polynomial coefficients (default None) to compute a
909 gain-elevation correction as a function of
910 elevation (in degrees).
911 filename: The name of an ascii file holding correction factors.
912 The first row of the ascii file must give the column
913 names and these MUST include columns
914 "ELEVATION" (degrees) and "FACTOR" (multiply data
915 by this) somewhere.
916 The second row must give the data type of the
917 column. Use 'R' for Real and 'I' for Integer.
918 An example file would be
919 (actual factors are arbitrary) :
920
921 TIME ELEVATION FACTOR
922 R R R
923 0.1 0 0.8
924 0.2 20 0.85
925 0.3 40 0.9
926 0.4 60 0.85
927 0.5 80 0.8
928 0.6 90 0.75
929 method: Interpolation method when correcting from a table.
930 Values are "nearest", "linear" (default), "cubic"
931 and "spline"
932 insitu: if False a new scantable is returned.
933 Otherwise, the scaling is done in-situ
934 The default is taken from .asaprc (False)
935 """
936
937 if insitu is None: insitu = rcParams['insitu']
938 self._math._setinsitu(insitu)
939 varlist = vars()
940 if poly is None:
941 poly = ()
942 from os.path import expandvars
943 filename = expandvars(filename)
944 s = scantable(self._math._gainel(self, poly, filename, method))
945 s._add_history("gain_el", varlist)
946 print_log()
947 if insitu: self._assign(s)
948 else: return s
949
950 def freq_align(self, reftime=None, method='cubic', insitu=None):
951 """
952 Return a scan where all rows have been aligned in frequency/velocity.
953 The alignment frequency frame (e.g. LSRK) is that set by function
954 set_freqframe.
955 Parameters:
956 reftime: reference time to align at. By default, the time of
957 the first row of data is used.
958 method: Interpolation method for regridding the spectra.
959 Choose from "nearest", "linear", "cubic" (default)
960 and "spline"
961 insitu: if False a new scantable is returned.
962 Otherwise, the scaling is done in-situ
963 The default is taken from .asaprc (False)
964 """
965 if insitu is None: insitu = rcParams["insitu"]
966 self._math._setinsitu(insitu)
967 varlist = vars()
968 if reftime is None: reftime = ""
969 s = scantable(self._math._freq_align(self, reftime, method))
970 s._add_history("freq_align", varlist)
971 print_log()
972 if insitu: self._assign(s)
973 else: return s
974
975 def opacity(self, tau, insitu=None):
976 """
977 Apply an opacity correction. The data
978 and Tsys are multiplied by the correction factor.
979 Parameters:
980 tau: Opacity from which the correction factor is
981 exp(tau*ZD)
982 where ZD is the zenith-distance
983 insitu: if False a new scantable is returned.
984 Otherwise, the scaling is done in-situ
985 The default is taken from .asaprc (False)
986 """
987 if insitu is None: insitu = rcParams['insitu']
988 self._math._setinsitu(insitu)
989 varlist = vars()
990 s = scantable(self._math._opacity(self, tau))
991 s._add_history("opacity", varlist)
992 print_log()
993 if insitu: self._assign(s)
994 else: return s
995
996 def bin(self, width=5, insitu=None):
997 """
998 Return a scan where all spectra have been binned up.
999 width: The bin width (default=5) in pixels
1000 insitu: if False a new scantable is returned.
1001 Otherwise, the scaling is done in-situ
1002 The default is taken from .asaprc (False)
1003 """
1004 if insitu is None: insitu = rcParams['insitu']
1005 self._math._setinsitu(insitu)
1006 varlist = vars()
1007 s = scantable(self._math._bin(self, width))
1008 s._add_history("bin", varlist)
1009 print_log()
1010 if insitu: self._assign(s)
1011 else: return s
1012
1013
1014 def resample(self, width=5, method='cubic', insitu=None):
1015 """
1016 Return a scan where all spectra have been binned up
1017 width: The bin width (default=5) in pixels
1018 method: Interpolation method when correcting from a table.
1019 Values are "nearest", "linear", "cubic" (default)
1020 and "spline"
1021 insitu: if False a new scantable is returned.
1022 Otherwise, the scaling is done in-situ
1023 The default is taken from .asaprc (False)
1024 """
1025 if insitu is None: insitu = rcParams['insitu']
1026 self._math._setinsitu(insitu)
1027 varlist = vars()
1028 s = scantable(self._math._resample(self, method, width))
1029 s._add_history("resample", varlist)
1030 print_log()
1031 if insitu: self._assign(s)
1032 else: return s
1033
1034
1035 def average_pol(self, mask=None, weight='none'):
1036 """
1037 Average the Polarisations together.
1038 Parameters:
1039 mask: An optional mask defining the region, where the
1040 averaging will be applied. The output will have all
1041 specified points masked.
1042 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1043 weighted), or 'tsys' (1/Tsys**2 weighted)
1044 """
1045 varlist = vars()
1046 if mask is None:
1047 mask = ()
1048 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1049 s._add_history("average_pol", varlist)
1050 print_log()
1051 return s
1052
1053 def average_beam(self, mask=None, weight='none'):
1054 """
1055 Average the Beams together.
1056 Parameters:
1057 mask: An optional mask defining the region, where the
1058 averaging will be applied. The output will have all
1059 specified points masked.
1060 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1061 weighted), or 'tsys' (1/Tsys**2 weighted)
1062 """
1063 varlist = vars()
1064 if mask is None:
1065 mask = ()
1066 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1067 s._add_history("average_beam", varlist)
1068 print_log()
1069 return s
1070
1071 def convert_pol(self, poltype=None):
1072 """
1073 Convert the data to a different polarisation type.
1074 Parameters:
1075 poltype: The new polarisation type. Valid types are:
1076 "linear", "stokes" and "circular"
1077 """
1078 varlist = vars()
1079 try:
1080 s = scantable(self._math._convertpol(self, poltype))
1081 except RuntimeError, msg:
1082 if rcParams['verbose']:
1083 print msg
1084 return
1085 else:
1086 raise
1087 s._add_history("convert_pol", varlist)
1088 print_log()
1089 return s
1090
1091 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1092 """
1093 Smooth the spectrum by the specified kernel (conserving flux).
1094 Parameters:
1095 scan: The input scan
1096 kernel: The type of smoothing kernel. Select from
1097 'hanning' (default), 'gaussian' and 'boxcar'.
1098 The first three characters are sufficient.
1099 width: The width of the kernel in pixels. For hanning this is
1100 ignored, otherwise it defaults to 5 pixels.
1101 For 'gaussian' it is the Full Width Half
1102 Maximum. For 'boxcar' it is the full width.
1103 insitu: if False a new scantable is returned.
1104 Otherwise, the scaling is done in-situ
1105 The default is taken from .asaprc (False)
1106 Example:
1107 none
1108 """
1109 if insitu is None: insitu = rcParams['insitu']
1110 self._math._setinsitu(insitu)
1111 varlist = vars()
1112 s = scantable(self._math._smooth(self, kernel.lower(), width))
1113 s._add_history("smooth", varlist)
1114 print_log()
1115 if insitu: self._assign(s)
1116 else: return s
1117
1118
1119 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1120 """
1121 Return a scan which has been baselined (all rows) by a polynomial.
1122 Parameters:
1123 scan: a scantable
1124 mask: an optional mask
1125 order: the order of the polynomial (default is 0)
1126 plot: plot the fit and the residual. In this mode each
1127 individual fit has to be approved by typing 'y'
1128 or 'n'
1129 insitu: if False a new scantable is returned.
1130 Otherwise, the scaling is done in-situ
1131 The default is taken from .asaprc (False)
1132 Example:
1133 # return a scan baselined by a third order polynomial,
1134 # not using a mask
1135 bscan = scan.poly_baseline(order=3)
1136 """
1137 if insitu is None: insitu = rcParams['insitu']
1138 varlist = vars()
1139 if mask is None:
1140 mask = list(NUM.ones(self.nchan(-1)))
1141 from asap.asapfitter import fitter
1142 f = fitter()
1143 f.set_scan(self, mask)
1144 f.set_function(poly=order)
1145 s = f.auto_fit(insitu, plot=plot)
1146 s._add_history("poly_baseline", varlist)
1147 print_log()
1148 if insitu: self._assign(s)
1149 else: return s
1150
1151 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1152 threshold=3, plot=False, insitu=None):
1153 """
1154 Return a scan which has been baselined (all rows) by a polynomial.
1155 Spectral lines are detected first using linefinder and masked out
1156 to avoid them affecting the baseline solution.
1157
1158 Parameters:
1159 mask: an optional mask retrieved from scantable
1160 edge: an optional number of channels to drop at
1161 the edge of the spectrum. If only one value is
1162 specified, the same number will be dropped from
1163 both sides of the spectrum. Default is to keep
1164 all channels. Nested tuples represent individual
1165 edge selection for different IFs (the number of spectral
1166 channels can differ between IFs)
1167 order: the order of the polynomial (default is 0)
1168 threshold: the threshold used by line finder. It is better to
1169 keep it large as only strong lines affect the
1170 baseline solution.
1171 plot: plot the fit and the residual. In this mode each
1172 individual fit has to be approved by typing 'y'
1173 or 'n'
1174 insitu: if False a new scantable is returned.
1175 Otherwise, the scaling is done in-situ
1176 The default is taken from .asaprc (False)
1177
1178 Example:
1179 scan2=scan.auto_poly_baseline(order=7)
1180 """
1181 if insitu is None: insitu = rcParams['insitu']
1182 varlist = vars()
1183 from asap.asapfitter import fitter
1184 from asap.asaplinefind import linefinder
1185 from asap import _is_sequence_or_number as _is_valid
1186
1187 # check whether edge is set up for each IF individually
1188 individualedge = False;
1189 if len(edge) > 1:
1190 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1191 individualedge = True;
1192
1193 if not _is_valid(edge, int) and not individualedge:
1194 raise ValueError, "Parameter 'edge' has to be an integer or a \
1195 pair of integers specified as a tuple. Nested tuples are allowed \
1196 to make individual selection for different IFs."
1197
1198 curedge = (0, 0)
1199 if individualedge:
1200 for edgepar in edge:
1201 if not _is_valid(edgepar, int):
1202 raise ValueError, "Each element of the 'edge' tuple has \
1203 to be a pair of integers or an integer."
1204 else:
1205 curedge = edge;
1206
1207 # setup fitter
1208 f = fitter()
1209 f.set_function(poly=order)
1210
1211 # setup line finder
1212 fl = linefinder()
1213 fl.set_options(threshold=threshold)
1214
1215 if not insitu:
1216 workscan = self.copy()
1217 else:
1218 workscan = self
1219
1220 fl.set_scan(workscan)
1221
1222 rows = range(workscan.nrow())
1223 asaplog.push("Processing:")
1224 for r in rows:
1225 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1226 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1227 workscan.getpol(r), workscan.getcycle(r))
1228 asaplog.push(msg, False)
1229
1230 # figure out edge parameter
1231 if individualedge:
1232 if len(edge) <= workscan.getif(r):
1233 raise RuntimeError, "Number of edge elements appears to " \
1234 "be less than the number of IFs"
1235 curedge = edge[workscan.getif(r)]
1236
1237 # setup line finder
1238 fl.find_lines(r, mask, curedge)
1239 f.set_scan(workscan, fl.get_mask())
1240 f.x = workscan._getabcissa(r)
1241 f.y = workscan._getspectrum(r)
1242 f.data = None
1243 f.fit()
1244 x = f.get_parameters()
1245 if plot:
1246 f.plot(residual=True)
1247 x = raw_input("Accept fit ( [y]/n ): ")
1248 if x.upper() == 'N':
1249 continue
1250 workscan._setspectrum(f.fitter.getresidual(), r)
1251 if plot:
1252 f._p.unmap()
1253 f._p = None
1254 workscan._add_history("auto_poly_baseline", varlist)
1255 if insitu:
1256 self._assign(workscan)
1257 else:
1258 return workscan
1259
1260 def rotate_linpolphase(self, angle):
1261 """
1262 Rotate the phase of the complex polarization O=Q+iU correlation.
1263 This is always done in situ in the raw data. So if you call this
1264 function more than once then each call rotates the phase further.
1265 Parameters:
1266 angle: The angle (degrees) to rotate (add) by.
1267 Examples:
1268 scan.rotate_linpolphase(2.3)
1269 """
1270 varlist = vars()
1271 self._math._rotate_linpolphase(self, angle)
1272 self._add_history("rotate_linpolphase", varlist)
1273 print_log()
1274 return
1275
1276
1277 def rotate_xyphase(self, angle):
1278 """
1279 Rotate the phase of the XY correlation. This is always done in situ
1280 in the data. So if you call this function more than once
1281 then each call rotates the phase further.
1282 Parameters:
1283 angle: The angle (degrees) to rotate (add) by.
1284 Examples:
1285 scan.rotate_xyphase(2.3)
1286 """
1287 varlist = vars()
1288 self._math._rotate_xyphase(self, angle)
1289 self._add_history("rotate_xyphase", varlist)
1290 print_log()
1291 return
1292
1293 def swap_linears(self):
1294 """
1295 Swap the linear polarisations XX and YY
1296 """
1297 varlist = vars()
1298 self._math._swap_linears(self)
1299 self._add_history("swap_linears", varlist)
1300 print_log()
1301 return
1302
1303 def invert_phase(self):
1304 """
1305 Invert the phase of the complex polarisation
1306 """
1307 varlist = vars()
1308 self._math._invert_phase(self)
1309 self._add_history("invert_phase", varlist)
1310 print_log()
1311 return
1312
1313 def add(self, offset, insitu=None):
1314 """
1315 Return a scan where all spectra have the offset added
1316 Parameters:
1317 offset: the offset
1318 insitu: if False a new scantable is returned.
1319 Otherwise, the scaling is done in-situ
1320 The default is taken from .asaprc (False)
1321 """
1322 if insitu is None: insitu = rcParams['insitu']
1323 self._math._setinsitu(insitu)
1324 varlist = vars()
1325 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1326 s._add_history("add", varlist)
1327 print_log()
1328 if insitu:
1329 self._assign(s)
1330 else:
1331 return s
1332
1333 def scale(self, factor, tsys=True, insitu=None, ):
1334 """
1335 Return a scan where all spectra are scaled by the given 'factor'
1336 Parameters:
1337 factor: the scaling factor
1338 insitu: if False a new scantable is returned.
1339 Otherwise, the scaling is done in-situ
1340 The default is taken from .asaprc (False)
1341 tsys: if True (default) then apply the operation to Tsys
1342 as well as the data
1343 """
1344 if insitu is None: insitu = rcParams['insitu']
1345 self._math._setinsitu(insitu)
1346 varlist = vars()
1347 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1348 s._add_history("scale", varlist)
1349 print_log()
1350 if insitu:
1351 self._assign(s)
1352 else:
1353 return s
1354
1355 def auto_quotient(self, mode='time', preserve=True):
1356 """
1357 This function allows you to build quotients automatically.
1358 It assumes the observation to have the same number of
1359 "ons" and "offs".
1360 It will support "closest off in time" in the future
1361 Parameters:
1362 mode: the on/off detection mode; 'suffix' (default)
1363 'suffix' identifies 'off' scans by the
1364 trailing '_R' (Mopra/Parkes) or
1365 '_e'/'_w' (Tid)
1366 preserve: you can preserve (default) the continuum or
1367 remove it. The equations used are
1368 preserve: Output = Toff * (on/off) - Toff
1369 remove: Output = Toff * (on/off) - Ton
1370 """
1371 modes = ["time"]
1372 if not mode in modes:
1373 msg = "please provide valid mode. Valid modes are %s" % (modes)
1374 raise ValueError(msg)
1375 varlist = vars()
1376 s = scantable(self._math._auto_quotient(self, mode, preserve))
1377 s._add_history("auto_quotient", varlist)
1378 print_log()
1379 return s
1380
1381 def mx_quotient(self, mask = None, weight='median', preserve=True):
1382 """
1383 Form a quotient using "off" beams when observing in "MX" mode.
1384 Parameters:
1385 mask: an optional mask to be used when weight == 'stddev'
1386 weight: How to average the off beams. Default is 'median'.
1387 preserve: you can preserve (default) the continuum or
1388 remove it. The equations used are
1389 preserve: Output = Toff * (on/off) - Toff
1390 remove: Output = Toff * (on/off) - Ton
1391 """
1392 if mask is None: mask = ()
1393 varlist = vars()
1394 on = scantable(self._math._mx_extract(self, 'on'))
1395 preoff = scantable(self._math._mx_extract(self, 'off'))
1396 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1397 from asapmath import quotient
1398 q = quotient(on, off, preserve)
1399 q._add_history("mx_quotient", varlist)
1400 print_log()
1401 return q
1402
1403 def freq_switch(self, insitu=None):
1404 """
1405 Apply frequency switching to the data.
1406 Parameters:
1407 insitu: if False a new scantable is returned.
1408 Otherwise, the switching is done in-situ
1409 The default is taken from .asaprc (False)
1410 Example:
1411 none
1412 """
1413 if insitu is None: insitu = rcParams['insitu']
1414 self._math._setinsitu(insitu)
1415 varlist = vars()
1416 s = scantable(self._math._freqswitch(self))
1417 s._add_history("freq_switch", varlist)
1418 print_log()
1419 if insitu: self._assign(s)
1420 else: return s
1421
1422 def recalc_azel(self):
1423 """
1424 Recalculate the azimuth and elevation for each position.
1425 Parameters:
1426 none
1427 Example:
1428 """
1429 varlist = vars()
1430 self._recalcazel()
1431 self._add_history("recalc_azel", varlist)
1432 print_log()
1433 return
1434
1435 def __add__(self, other):
1436 varlist = vars()
1437 s = None
1438 if isinstance(other, scantable):
1439 print "scantable + scantable NYI"
1440 return
1441 elif isinstance(other, float):
1442 s = scantable(self._math._unaryop(self, other, "ADD", False))
1443 else:
1444 raise TypeError("Other input is not a scantable or float value")
1445 s._add_history("operator +", varlist)
1446 print_log()
1447 return s
1448
1449 def __sub__(self, other):
1450 """
1451 implicit on all axes and on Tsys
1452 """
1453 varlist = vars()
1454 s = None
1455 if isinstance(other, scantable):
1456 print "scantable - scantable NYI"
1457 return
1458 elif isinstance(other, float):
1459 s = scantable(self._math._unaryop(self, other, "SUB", False))
1460 else:
1461 raise TypeError("Other input is not a scantable or float value")
1462 s._add_history("operator -", varlist)
1463 print_log()
1464 return s
1465
1466 def __mul__(self, other):
1467 """
1468 implicit on all axes and on Tsys
1469 """
1470 varlist = vars()
1471 s = None
1472 if isinstance(other, scantable):
1473 print "scantable * scantable NYI"
1474 return
1475 elif isinstance(other, float):
1476 s = scantable(self._math._unaryop(self, other, "MUL", False))
1477 else:
1478 raise TypeError("Other input is not a scantable or float value")
1479 s._add_history("operator *", varlist)
1480 print_log()
1481 return s
1482
1483
1484 def __div__(self, other):
1485 """
1486 implicit on all axes and on Tsys
1487 """
1488 varlist = vars()
1489 s = None
1490 if isinstance(other, scantable):
1491 print "scantable / scantable NYI"
1492 return
1493 elif isinstance(other, float):
1494 if other == 0.0:
1495 raise ZeroDivisionError("Dividing by zero is not recommended")
1496 s = scantable(self._math._unaryop(self, other, "DIV", False))
1497 else:
1498 raise TypeError("Other input is not a scantable or float value")
1499 s._add_history("operator /", varlist)
1500 print_log()
1501 return s
1502
1503 def get_fit(self, row=0):
1504 """
1505 Print or return the stored fits for a row in the scantable
1506 Parameters:
1507 row: the row which the fit has been applied to.
1508 """
1509 if row >= self.nrow():
1510 return
1511 from asap.asapfit import asapfit
1512 fit = asapfit(self._getfit(row))
1513 if rcParams['verbose']:
1514 print fit
1515 return
1516 else:
1517 return fit.as_dict()
1518
1519 def _add_history(self, funcname, parameters):
1520 # create date
1521 sep = "##"
1522 from datetime import datetime
1523 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1524 hist = dstr+sep
1525 hist += funcname+sep#cdate+sep
1526 if parameters.has_key('self'): del parameters['self']
1527 for k, v in parameters.iteritems():
1528 if type(v) is dict:
1529 for k2, v2 in v.iteritems():
1530 hist += k2
1531 hist += "="
1532 if isinstance(v2, scantable):
1533 hist += 'scantable'
1534 elif k2 == 'mask':
1535 if isinstance(v2, list) or isinstance(v2, tuple):
1536 hist += str(self._zip_mask(v2))
1537 else:
1538 hist += str(v2)
1539 else:
1540 hist += str(v2)
1541 else:
1542 hist += k
1543 hist += "="
1544 if isinstance(v, scantable):
1545 hist += 'scantable'
1546 elif k == 'mask':
1547 if isinstance(v, list) or isinstance(v, tuple):
1548 hist += str(self._zip_mask(v))
1549 else:
1550 hist += str(v)
1551 else:
1552 hist += str(v)
1553 hist += sep
1554 hist = hist[:-2] # remove trailing '##'
1555 self._addhistory(hist)
1556
1557
1558 def _zip_mask(self, mask):
1559 mask = list(mask)
1560 i = 0
1561 segments = []
1562 while mask[i:].count(1):
1563 i += mask[i:].index(1)
1564 if mask[i:].count(0):
1565 j = i + mask[i:].index(0)
1566 else:
1567 j = len(mask)
1568 segments.append([i, j])
1569 i = j
1570 return segments
1571
1572 def _get_ordinate_label(self):
1573 fu = "("+self.get_fluxunit()+")"
1574 import re
1575 lbl = "Intensity"
1576 if re.match(".K.", fu):
1577 lbl = "Brightness Temperature "+ fu
1578 elif re.match(".Jy.", fu):
1579 lbl = "Flux density "+ fu
1580 return lbl
1581
1582 def _check_ifs(self):
1583 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1584 nchans = filter(lambda t: t > 0, nchans)
1585 return (sum(nchans)/len(nchans) == nchans[0])
1586
1587 def _fill(self, names, unit, average):
1588 import os
1589 from asap._asap import stfiller
1590 first = True
1591 fullnames = []
1592 for name in names:
1593 name = os.path.expandvars(name)
1594 name = os.path.expanduser(name)
1595 if not os.path.exists(name):
1596 msg = "File '%s' does not exists" % (name)
1597 if rcParams['verbose']:
1598 asaplog.push(msg)
1599 print asaplog.pop().strip()
1600 return
1601 raise IOError(msg)
1602 fullnames.append(name)
1603 if average:
1604 asaplog.push('Auto averaging integrations')
1605 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1606 for name in fullnames:
1607 tbl = Scantable(stype)
1608 r = stfiller(tbl)
1609 msg = "Importing %s..." % (name)
1610 asaplog.push(msg, False)
1611 print_log()
1612 r._open(name, -1, -1)
1613 r._read()
1614 #tbl = r._getdata()
1615 if average:
1616 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1617 #tbl = tbl2
1618 if not first:
1619 tbl = self._math._merge([self, tbl])
1620 #tbl = tbl2
1621 Scantable.__init__(self, tbl)
1622 r._close()
1623 del r, tbl
1624 first = False
1625 if unit is not None:
1626 self.set_fluxunit(unit)
1627 self.set_freqframe(rcParams['scantable.freqframe'])
1628 #self._add_history("scantable", varlist)
1629