source: trunk/python/scantable.py@1191

Last change on this file since 1191 was 1190, checked in by mar637, 18 years ago

added set_feedtype (Ticket #61)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 60.6 KB
1from asap._asap import Scantable
2from asap import rcParams
3from asap import print_log
4from asap import asaplog
5from asap import selector
6from asap import NUM
7from asap import linecatalog
8
9class scantable(Scantable):
10 """
11 The ASAP container for scans
12 """
13
14 def __init__(self, filename, average=None, unit=None):
15 """
16 Create a scantable from a saved one or make a reference
17 Parameters:
18 filename: the name of an asap table on disk
19 or
20 the name of a rpfits/sdfits/ms file
21 (integrations within scans are auto averaged
22 and the whole file is read)
23 or
24 [advanced] a reference to an existing
25 scantable
26 average: average all integrations within a scan on read.
27 The default (True) is taken from .asaprc.
28 unit: brightness unit; must be consistent with K or Jy.
29 Over-rides the default selected by the reader
30 (input rpfits/sdfits/ms) or replaces the value
31 in existing scantables
32 """
33 if average is None:
34 average = rcParams['scantable.autoaverage']
35 #varlist = vars()
36 from asap._asap import stmath
37 self._math = stmath()
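# The constructor accepts several forms of input (see the docstring above):
# an existing Scantable to reference, a path to an asap table on disk, a
# raw rpfits/sdfits/ms file to import, or a list of such files which are
# imported and merged into one scantable.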
38 if isinstance(filename, Scantable):
39 Scantable.__init__(self, filename)
40 else:
41 if isinstance(filename, str):
42 import os.path
43 filename = os.path.expandvars(filename)
44 filename = os.path.expanduser(filename)
45 if not os.path.exists(filename):
46 s = "File '%s' not found." % (filename)
47 if rcParams['verbose']:
48 asaplog.push(s)
49 print asaplog.pop().strip()
50 return
51 raise IOError(s)
52 if os.path.isdir(filename) \
53 and not os.path.exists(filename+'/table.f1'):
54 # crude check if asap table
55 if os.path.exists(filename+'/table.info'):
56 ondisk = rcParams['scantable.storage'] == 'disk'
57 Scantable.__init__(self, filename, ondisk)
58 if unit is not None:
59 self.set_fluxunit(unit)
60 self.set_freqframe(rcParams['scantable.freqframe'])
61 else:
62 msg = "The given file '%s'is not a valid " \
63 "asap table." % (filename)
64 if rcParams['verbose']:
65 print msg
66 return
67 else:
68 raise IOError(msg)
69 else:
70 self._fill([filename], unit, average)
71 elif (isinstance(filename, list) or isinstance(filename, tuple)) \
72 and isinstance(filename[-1], str):
73 self._fill(filename, unit, average)
74 print_log()
75
76 def save(self, name=None, format=None, overwrite=False):
77 """
78 Store the scantable on disk. This can be an asap (aips++) Table, SDFITS,
79 Image FITS or MS2 format.
80 Parameters:
81 name: the name of the output file. For format "ASCII"
82 this is the root file name (data in 'name'.txt
83 and header in 'name'_header.txt)
84 format: an optional file format. Default is ASAP.
85 Allowed are - 'ASAP' (save as ASAP [aips++] Table),
86 'SDFITS' (save as SDFITS file)
87 'ASCII' (saves as ascii text file)
88 'MS2' (saves as an aips++
89 MeasurementSet V2)
90 overwrite: If the file should be overwritten if it exists.
91 The default (False) is to return with a warning
92 without writing the output. USE WITH CARE.
93 Example:
94 scan.save('myscan.asap')
95 scan.save('myscan.sdfits', 'SDFITS')
96 """
97 from os import path
98 if format is None: format = rcParams['scantable.save']
99 suffix = '.'+format.lower()
100 if name is None or name == "":
101 name = 'scantable'+suffix
102 msg = "No filename given. Using default name %s..." % name
103 asaplog.push(msg)
104 name = path.expandvars(name)
105 if path.isfile(name) or path.isdir(name):
106 if not overwrite:
107 msg = "File %s exists." % name
108 if rcParams['verbose']:
109 print msg
110 return
111 else:
112 raise IOError(msg)
113 format2 = format.upper()
114 if format2 == 'ASAP':
115 self._save(name)
116 else:
117 from asap._asap import stwriter as stw
118 writer = stw(format2)
119 writer.write(self, name)
120 print_log()
121 return
122
123 def copy(self):
124 """
125 Return a copy of this scantable.
126 Parameters:
127 none
128 Example:
129 copiedscan = scan.copy()
130 """
131 sd = scantable(Scantable._copy(self))
132 return sd
133
134 def drop_scan(self, scanid=None):
135 """
136 Return a new scantable where the specified scan number(s) has(have)
137 been dropped.
138 Parameters:
139 scanid: a (list of) scan number(s)
140 """
141 from asap import _is_sequence_or_number as _is_valid
142 from asap import _to_list
143 from asap import unique
144 if not _is_valid(scanid):
145 if rcParams['verbose']:
146 print "Please specify a scanno to drop from the scantable"
147 return
148 else:
149 raise RuntimeError("No scan given")
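# Build the complete list of scan numbers, remove the requested ones and
# select the remainder; a copy of that selection becomes the new scantable.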
150 try:
151 scanid = _to_list(scanid)
152 allscans = unique([ self.getscan(i) for i in range(self.nrow())])
153 for sid in scanid: allscans.remove(sid)
154 if len(allscans) == 0:
155 raise ValueError("Can't remove all scans")
156 except ValueError:
157 if rcParams['verbose']:
158 print "Couldn't find any match."
159 return
160 else: raise
161 try:
162 bsel = self.get_selection()
163 sel = selector()
164 sel.set_scans(allscans)
165 self.set_selection(bsel+sel)
166 scopy = self._copy()
167 self.set_selection(bsel)
168 return scantable(scopy)
169 except RuntimeError:
170 if rcParams['verbose']:
171 print "Couldn't find any match."
172 else:
173 raise
174
175
176 def get_scan(self, scanid=None):
177 """
178 Return a specific scan (by scanno) or collection of scans (by
179 source name) in a new scantable.
180 Parameters:
181 scanid: a (list of) scanno or a source name, unix-style
182 patterns are accepted for source name matching, e.g.
183 '*_R' gets all 'ref' scans
184 Example:
185 # get all scans containing the source '323p459'
186 newscan = scan.get_scan('323p459')
187 # get all 'off' scans
188 refscans = scan.get_scan('*_R')
189 # get a subset of scans by scanno (as listed in scan.summary())
190 newscan = scan.get_scan([0, 2, 7, 10])
191 """
192 if scanid is None:
193 if rcParams['verbose']:
194 print "Please specify a scan no or name to " \
195 "retrieve from the scantable"
196 return
197 else:
198 raise RuntimeError("No scan given")
199
200 try:
201 bsel = self.get_selection()
202 sel = selector()
203 if type(scanid) is str:
204 sel.set_name(scanid)
205 self.set_selection(bsel+sel)
206 scopy = self._copy()
207 self.set_selection(bsel)
208 return scantable(scopy)
209 elif type(scanid) is int:
210 sel.set_scans([scanid])
211 self.set_selection(bsel+sel)
212 scopy = self._copy()
213 self.set_selection(bsel)
214 return scantable(scopy)
215 elif type(scanid) is list:
216 sel.set_scans(scanid)
217 self.set_selection(sel)
218 scopy = self._copy()
219 self.set_selection(bsel)
220 return scantable(scopy)
221 else:
222 msg = "Illegal scanid type, use 'int' or 'list' if ints."
223 if rcParams['verbose']:
224 print msg
225 else:
226 raise TypeError(msg)
227 except RuntimeError:
228 if rcParams['verbose']: print "Couldn't find any match."
229 else: raise
230
231 def __str__(self):
232 return Scantable._summary(self, True)
233
234 def summary(self, filename=None):
235 """
236 Print a summary of the contents of this scantable.
237 Parameters:
238 filename: the name of a file to write the output to
239 Default - no file output
242 """
243 info = Scantable._summary(self, True)
244 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
245 if filename is not None:
246 if filename == "":
247 filename = 'scantable_summary.txt'
248 from os.path import expandvars, isdir
249 filename = expandvars(filename)
250 if not isdir(filename):
251 data = open(filename, 'w')
252 data.write(info)
253 data.close()
254 else:
255 msg = "Illegal file name '%s'." % (filename)
256 if rcParams['verbose']:
257 print msg
258 else:
259 raise IOError(msg)
260 if rcParams['verbose']:
261 try:
262 from IPython.genutils import page as pager
263 except ImportError:
264 from pydoc import pager
265 pager(info)
266 else:
267 return info
268
269
270 def get_selection(self):
271 """
272 Get the selection object currently set on this scantable.
273 Parameters:
274 none
275 Example:
276 sel = scan.get_selection()
277 sel.set_ifs(0) # select IF 0
278 scan.set_selection(sel) # apply modified selection
279 """
280 return selector(self._getselection())
281
282 def set_selection(self, selection=selector()):
283 """
284 Select a subset of the data. All following operations on this scantable
285 are only applied to this selection.
286 Parameters:
287 selection: a selector object (default unset the selection)
288 Examples:
289 sel = selector() # create a selection object
290 sel.set_scans([0, 3]) # select SCANNO 0 and 3
291 scan.set_selection(sel) # set the selection
292 scan.summary() # will only print summary of scanno 0 and 3
293 scan.set_selection() # unset the selection
294 """
295 self._setselection(selection)
296
297 def stats(self, stat='stddev', mask=None):
298 """
299 Determine the specified statistic of the current beam/if/pol
300 Takes a 'mask' as an optional parameter to specify which
301 channels should be excluded.
302 Parameters:
303 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
304 'var', 'stddev', 'avdev', 'rms', 'median'
305 mask: an optional mask specifying where the statistic
306 should be determined.
307 Example:
308 scan.set_unit('channel')
309 msk = scan.create_mask([100, 200], [500, 600])
310 scan.stats(stat='mean', mask=msk)
311 """
312 if mask is None:
313 mask = []
314 axes = ['Beam', 'IF', 'Pol', 'Time']
315 if not self._check_ifs():
316 raise ValueError("Cannot apply mask as the IFs have different "
317 "number of channels. Please use setselection() "
318 "to select individual IFs")
319
320 statvals = self._math._stats(self, mask, stat)
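# _stats returns one value per row; the loop below records the
# (scan, beam, IF, pol, cycle) ids of each row and builds a readable
# per-row report which is printed in verbose mode.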
321 out = ''
322 axes = []
323 for i in range(self.nrow()):
324 axis = []
325 axis.append(self.getscan(i))
326 axis.append(self.getbeam(i))
327 axis.append(self.getif(i))
328 axis.append(self.getpol(i))
329 axis.append(self.getcycle(i))
330 axes.append(axis)
331 tm = self._gettime(i)
332 src = self._getsourcename(i)
333 out += 'Scan[%d] (%s) ' % (axis[0], src)
334 out += 'Time[%s]:\n' % (tm)
335 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
336 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
337 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
338 out += '= %3.3f\n' % (statvals[i])
339 out += "--------------------------------------------------\n"
340
341 if rcParams['verbose']:
342 print "--------------------------------------------------"
343 print " ", stat
344 print "--------------------------------------------------"
345 print out
346 retval = { 'axesnames': ['scanno', 'beamno', 'ifno', 'polno', 'cycleno'],
347 'axes' : axes,
348 'data': statvals}
349 return retval
350
351 def stddev(self, mask=None):
352 """
353 Determine the standard deviation of the current beam/if/pol
354 Takes a 'mask' as an optional parameter to specify which
355 channels should be excluded.
356 Parameters:
357 mask: an optional mask specifying where the standard
358 deviation should be determined.
359
360 Example:
361 scan.set_unit('channel')
362 msk = scan.create_mask([100, 200], [500, 600])
363 scan.stddev(mask=msk)
364 """
365 return self.stats(stat='stddev', mask=mask)
366
367
368 def column_names(self):
369 """
370 Return a list of column names, which can be used for selection.
371 """
372 return list(Scantable.column_names(self))
373
374 def get_tsys(self):
375 """
376 Return the System temperatures.
377 Parameters:
378
379 Returns:
380 a list of Tsys values for the current selection
381 """
382
383 return self._row_callback(self._gettsys, "Tsys")
384
385 def _row_callback(self, callback, label):
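# Apply 'callback' to every row, collecting the returned values in
# 'outvec' and building a per-row text report (printed in verbose mode).
# The vector of values is returned.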
386 axes = []
387 axesnames = ['scanno', 'beamno', 'ifno', 'polno', 'cycleno']
388 out = ""
389 outvec = []
390 for i in range(self.nrow()):
391 axis = []
392 axis.append(self.getscan(i))
393 axis.append(self.getbeam(i))
394 axis.append(self.getif(i))
395 axis.append(self.getpol(i))
396 axis.append(self.getcycle(i))
397 axes.append(axis)
398 tm = self._gettime(i)
399 src = self._getsourcename(i)
400 out += 'Scan[%d] (%s) ' % (axis[0], src)
401 out += 'Time[%s]:\n' % (tm)
402 if self.nbeam(-1) > 1: out += ' Beam[%d] ' % (axis[1])
403 if self.nif(-1) > 1: out += ' IF[%d] ' % (axis[2])
404 if self.npol(-1) > 1: out += ' Pol[%d] ' % (axis[3])
405 outvec.append(callback(i))
406 out += '= %3.3f\n' % (outvec[i])
407 out += "--------------------------------------------------\n"
408 if rcParams['verbose']:
409 print "--------------------------------------------------"
410 print " %s" % (label)
411 print "--------------------------------------------------"
412 print out
413 # disabled because the vector seems more useful
414 #retval = {'axesnames': axesnames, 'axes': axes, 'data': outvec}
415 return outvec
416
417 def _get_column(self, callback, row=-1):
418 """
419 """
420 if row == -1:
421 return [callback(i) for i in range(self.nrow())]
422 else:
423 if 0 <= row < self.nrow():
424 return callback(row)
425
426
427 def get_time(self, row=-1):
428 """
429 Get a list of time stamps for the observations.
430 Return a datetime object for each integration in the scantable.
431 Parameters:
432 row: row no of integration. Default -1 return all rows
433 Example:
434 none
435 """
436 from time import strptime
437 from datetime import datetime
438 times = self._get_column(self._gettime, row)
439 format = "%Y/%m/%d/%H:%M:%S"
440 if isinstance(times, list):
441 return [datetime(*strptime(i, format)[:6]) for i in times]
442 else:
443 return datetime(*strptime(times, format)[:6])
444
445 def get_sourcename(self, row=-1):
446 """
447 Get a list of source names for the observations.
448 Return a string for each integration in the scantable.
449 Parameters:
450 row: row no of integration. Default -1 return all rows
451 Example:
452 none
453 """
454 return self._get_column(self._getsourcename, row)
455
456 def get_elevation(self, row=-1):
457 """
458 Get a list of elevations for the observations.
459 Return a float for each integration in the scantable.
460 Parameters:
461 row: row no of integration. Default -1 return all rows
462 Example:
463 none
464 """
465 return self._get_column(self._getelevation, row)
466
467 def get_azimuth(self, row=-1):
468 """
469 Get a list of azimuths for the observations.
470 Return a float for each integration in the scantable.
471 Parameters:
472 row: row no of integration. Default -1 return all rows
473 Example:
474 none
475 """
476 return self._get_column(self._getazimuth, row)
477
478 def get_parangle(self, row=-1):
479 """
480 Get a list of parallactic angles for the observations.
481 Return a float for each integration in the scantable.
482 Parameters:
483 row: row no of integration. Default -1 return all rows
484 Example:
485 none
486 """
487 return self._get_column(self._getparangle, row)
488
489 def get_direction(self, row=-1):
490 """
491 Get a list of Positions on the sky (direction) for the observations.
492 Return a float for each integration in the scantable.
493 Parameters:
494 row: row no of integration. Default -1 return all rows
495 Example:
496 none
497 """
498 return self._get_column(self._getdirection, row)
499
500 def set_unit(self, unit='channel'):
501 """
502 Set the unit for all following operations on this scantable
503 Parameters:
504 unit: optional unit, default is 'channel'
505 one of '*Hz', 'km/s', 'channel', ''
506 """
507 varlist = vars()
508 if unit in ['', 'pixel', 'channel']:
509 unit = ''
510 inf = list(self._getcoordinfo())
511 inf[0] = unit
512 self._setcoordinfo(inf)
513 self._add_history("set_unit", varlist)
514
515 def set_instrument(self, instr):
516 """
517 Set the instrument for subsequent processing
518 Parameters:
519 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
520 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
521 """
522 self._setInstrument(instr)
523 self._add_history("set_instrument", vars())
524 print_log()
525
526 def set_feedtype(self, feedtype):
527 """
528 Overwrite the feed type, which might not be set correctly.
529 Parameters:
530 feedtype: 'linear' or 'circular'
531 """
532 self._setfeedtype(feedtype)
533 self._add_history("set_feedtype", vars())
534 print_log()
535
536 def set_doppler(self, doppler='RADIO'):
537 """
538 Set the doppler for all following operations on this scantable.
539 Parameters:
540 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
541 """
542 varlist = vars()
543 inf = list(self._getcoordinfo())
544 inf[2] = doppler
545 self._setcoordinfo(inf)
546 self._add_history("set_doppler", vars())
547 print_log()
548
549 def set_freqframe(self, frame=None):
550 """
551 Set the frame type of the Spectral Axis.
552 Parameters:
553 frame: an optional frame type, default 'LSRK'. Valid frames are:
554 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
555 'GEO', 'GALACTO', 'LGROUP', 'CMB'
556 Examples:
557 scan.set_freqframe('BARY')
558 """
559 if frame is None: frame = rcParams['scantable.freqframe']
560 varlist = vars()
561 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
562 'GEO', 'GALACTO', 'LGROUP', 'CMB']
563
564 if frame in valid:
565 inf = list(self._getcoordinfo())
566 inf[1] = frame
567 self._setcoordinfo(inf)
568 self._add_history("set_freqframe", varlist)
569 else:
570 msg = "Please specify a valid freq type. Valid types are:\n", valid
571 if rcParams['verbose']:
572 print msg
573 else:
574 raise TypeError(msg)
575 print_log()
576
577 def set_dirframe(self, frame=""):
578 """
579 Set the frame type of the Direction on the sky.
580 Parameters:
581 frame: an optional frame type, default ''. Valid frames are:
582 'J2000', 'B1950', 'GALACTIC'
583 Examples:
584 scan.set_dirframe('GALACTIC')
585 """
586 varlist = vars()
587 try:
588 Scantable.set_dirframe(self, frame)
589 except RuntimeError, msg:
590 if rcParams['verbose']:
591 print msg
592 else:
593 raise
594 self._add_history("set_dirframe", varlist)
595
596 def get_unit(self):
597 """
598 Get the default unit set in this scantable
599 Parameters:
600 Returns:
601 A unit string
602 """
603 inf = self._getcoordinfo()
604 unit = inf[0]
605 if unit == '': unit = 'channel'
606 return unit
607
608 def get_abcissa(self, rowno=0):
609 """
610 Get the abcissa in the current coordinate setup for the currently
611 selected Beam/IF/Pol
612 Parameters:
613 rowno: an optional row number in the scantable. Default is the
614 first row, i.e. rowno=0
615 Returns:
616 The abcissa values and their format string (as a tuple)
617 """
618 abc = self._getabcissa(rowno)
619 lbl = self._getabcissalabel(rowno)
620 print_log()
621 return abc, lbl
622
623 def flag(self, mask=None):
624 """
625 Flag the selected data using an optional channel mask.
626 Parameters:
627 mask: an optional channel mask, created with create_mask. Default
628 (no mask) is all channels.
629 """
630 varlist = vars()
631 if mask is None:
632 mask = []
633 try:
634 self._flag(mask)
635 except RuntimeError, msg:
636 if rcParams['verbose']:
637 print msg
638 return
639 else: raise
640 self._add_history("flag", varlist)
641
642
643 def create_mask(self, *args, **kwargs):
644 """
645 Compute and return a mask based on [min, max] windows.
646 The specified windows are to be INCLUDED, when the mask is
647 applied.
648 Parameters:
649 [min, max], [min2, max2], ...
650 Pairs of start/end points (inclusive) specifying the regions
651 to be masked
652 invert: optional argument. If specified as True,
653 return an inverted mask, i.e. the regions
654 specified are EXCLUDED
655 row: create the mask using the specified row for
656 unit conversions, default is row=0
657 only necessary if frequency varies over rows.
658 Example:
659 scan.set_unit('channel')
660 a)
661 msk = scan.create_mask([400, 500], [800, 900])
662 # masks everything outside 400 and 500
663 # and 800 and 900 in the unit 'channel'
664
665 b)
666 msk = scan.create_mask([400, 500], [800, 900], invert=True)
667 # masks the regions between 400 and 500
668 # and 800 and 900 in the unit 'channel'
669 c)
670 # mask only channel 400
671 msk = scan.create_mask([400, 400])
672 """
673 row = 0
674 if kwargs.has_key("row"):
675 row = kwargs.get("row")
676 data = self._getabcissa(row)
677 u = self._getcoordinfo()[0]
678 if rcParams['verbose']:
679 if u == "": u = "channel"
680 msg = "The current mask window unit is %s" % u
681 i = self._check_ifs()
682 if not i:
683 msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
684 asaplog.push(msg)
685 n = self.nchan()
686 msk = NUM.zeros(n)
687 # test if args is a 'list' or a 'normal *args - UGLY!!!
688
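# Both call styles are accepted: create_mask([a, b], [c, d]) passes the
# windows as separate arguments, while create_mask([[a, b], [c, d]])
# passes one list of windows; the innermost element type tells them apart.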
689 ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
690 and args or args[0]
691 for window in ws:
692 if (len(window) != 2 or window[0] > window[1] ):
693 raise TypeError("A window needs to be defined as [min, max]")
694 for i in range(n):
695 if data[i] >= window[0] and data[i] <= window[1]:
696 msk[i] = 1
697 if kwargs.has_key('invert'):
698 if kwargs.get('invert'):
699 msk = NUM.logical_not(msk)
700 print_log()
701 return msk
702
703 def get_restfreqs(self):
704 """
705 Get the restfrequency(s) stored in this scantable.
706 The return value(s) are always of unit 'Hz'
707 Parameters:
708 none
709 Returns:
710 a list of doubles
711 """
712 return list(self._getrestfreqs())
713
714
715 def set_restfreqs(self, freqs=None, unit='Hz'):
716 """
717 Set or replace the restfrequency(s) for this scantable.
718 If the 'freqs' argument holds a scalar,
719 then that rest frequency will be applied to all the selected
720 data. If the 'freqs' argument holds
721 a vector, then it MUST be of equal or smaller length than
722 the number of IFs (and the available restfrequencies will be
723 replaced by this vector). In this case, *all* data have
724 the restfrequency set per IF according
725 to the corresponding value you give in the 'freqs' vector.
726 E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
727 IF 1 gets restfreq 2e9.
728 You can also specify the frequencies via a linecatalog.
729
730 Parameters:
731 freqs: list of rest frequency values or string identifiers
732 unit: unit for rest frequency (default 'Hz')
733
734 Example:
735 # set the given restfrequency for the whole table
736 scan.set_restfreqs(freqs=1.4e9)
737 # If the number of IFs in the data is >= 2 then IF0 gets the first
738 # value IF1 the second...
739 scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
740 #set the given restfrequency for the whole table (by name)
741 scan.set_restfreqs(freqs="OH1667")
742
743 Note:
744 To do more sophisticated restfrequency setting, e.g. on a
745 source and IF basis, use scantable.set_selection() before using
746 this function.
747 # provided your scantable is called scan
748 selection = selector()
749 selection.set_name("ORION*")
750 selection.set_ifs([1])
751 scan.set_selection(selection)
752 scan.set_restfreqs(freqs=86.6e9)
753
754 """
755 varlist = vars()
756 from asap import linecatalog
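# 'freqs' may be a single value (applied to all selected data), a
# list/tuple of values or of {'value': ..., 'name': ...} dicts (applied
# per IF), or a linecatalog (one entry per IF).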
757 # simple value
758 if isinstance(freqs, int) or isinstance(freqs, float):
759 self._setrestfreqs(freqs, "",unit)
760 # list of values
761 elif isinstance(freqs, list) or isinstance(freqs, tuple):
762 # list values are scalars
763 if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
764 sel = selector()
765 savesel = self._getselection()
766 for i in xrange(len(freqs)):
767 sel.set_ifs([i])
768 self._setselection(sel)
769 self._setrestfreqs(freqs[i], "",unit)
770 self._setselection(savesel)
771 # list values are dicts, {'value': ..., 'name': ...}
772 elif isinstance(freqs[-1], dict):
773 sel = selector()
774 savesel = self._getselection()
775 for i in xrange(len(freqs)):
776 sel.set_ifs([i])
777 self._setselection(sel)
778 self._setrestfreqs(freqs[i]["value"],
779 freqs[i]["name"], "MHz")
780 self._setselection(savesel)
781 # freqs are to be taken from a linecatalog
782 elif isinstance(freqs, linecatalog):
783 sel = selector()
784 savesel = self._getselection()
785 for i in xrange(freqs.nrow()):
786 sel.set_ifs([i])
787 self._setselection(sel)
788 self._setrestfreqs(freqs.get_frequency(i),
789 freqs.get_name(i), "MHz")
790 # ensure that we are not iterating past nIF
791 if i == self.nif()-1: break
792 self._setselection(savesel)
793 else:
794 return
795 self._add_history("set_restfreqs", varlist)
796
797
798
799 def history(self):
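# History records are '##'-separated strings written by _add_history():
# date ## function name ## param=value ## ...
# Separator lines starting with '---' are passed through unchanged.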
800 hist = list(self._gethistory())
801 out = "-"*80
802 for h in hist:
803 if h.startswith("---"):
804 out += "\n"+h
805 else:
806 items = h.split("##")
807 date = items[0]
808 func = items[1]
809 items = items[2:]
810 out += "\n"+date+"\n"
811 out += "Function: %s\n Parameters:" % (func)
812 for i in items:
813 s = i.split("=")
814 out += "\n %s = %s" % (s[0], s[1])
815 out += "\n"+"-"*80
816 try:
817 from IPython.genutils import page as pager
818 except ImportError:
819 from pydoc import pager
820 pager(out)
821 return
822
823 #
824 # Maths business
825 #
826
827 def average_time(self, mask=None, scanav=False, weight='tint', align=False):
828 """
829 Return the (time) weighted average of a scan.
830 Note:
831 in channels only - align if necessary
832 Parameters:
833 mask: an optional mask (only used for 'var' and 'tsys'
834 weighting)
835 scanav: True averages each scan separately
836 False (default) averages all scans together,
837 weight: Weighting scheme.
838 'none' (mean no weight)
839 'var' (1/var(spec) weighted)
840 'tsys' (1/Tsys**2 weighted)
841 'tint' (integration time weighted)
842 'tintsys' (Tint/Tsys**2)
843 'median' (median averaging)
844 The default is 'tint'
845 align: align the spectra in velocity before averaging. It takes
846 the time of the first spectrum as reference time.
847 Example:
848 # time average the scantable without using a mask
849 newscan = scan.average_time()
850 """
851 varlist = vars()
852 if weight is None: weight = 'TINT'
853 if mask is None: mask = ()
854 if scanav: scanav = "SCAN"
855 else: scanav = "NONE"
856 scan = (self, )
857 try:
858 if align:
859 scan = (self.freq_align(insitu=False), )
860 s = None
861 if weight.upper() == 'MEDIAN':
862 s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
863 scanav))
864 else:
865 s = scantable(self._math._average(scan, mask, weight.upper(),
866 scanav))
867 except RuntimeError, msg:
868 if rcParams['verbose']:
869 print msg
870 return
871 else: raise
872 s._add_history("average_time", varlist)
873 print_log()
874 return s
875
876 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
877 """
878 Return a scan where all spectra are converted to either
879 Jansky or Kelvin depending upon the flux units of the scan table.
880 By default the function tries to look the values up internally.
881 If it can't find them (or if you want to over-ride), you must
882 specify EITHER jyperk OR eta (and D which it will try to look up
883 also if you don't set it). jyperk takes precedence if you set both.
884 Parameters:
885 jyperk: the Jy / K conversion factor
886 eta: the aperture efficiency
887 d: the geometric diameter (metres)
888 insitu: if False a new scantable is returned.
889 Otherwise, the scaling is done in-situ
890 The default is taken from .asaprc (False)
894 """
895 if insitu is None: insitu = rcParams['insitu']
896 self._math._setinsitu(insitu)
897 varlist = vars()
898 if jyperk is None: jyperk = -1.0
899 if d is None: d = -1.0
900 if eta is None: eta = -1.0
901 s = scantable(self._math._convertflux(self, d, eta, jyperk))
902 s._add_history("convert_flux", varlist)
903 print_log()
904 if insitu: self._assign(s)
905 else: return s
906
907 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
908 """
909 Return a scan after applying a gain-elevation correction.
910 The correction can be made via either a polynomial or a
911 table-based interpolation (and extrapolation if necessary).
912 You specify polynomial coefficients, an ascii table or neither.
913 If you specify neither, then a polynomial correction will be made
914 with built in coefficients known for certain telescopes (an error
915 will occur if the instrument is not known).
916 The data and Tsys are *divided* by the scaling factors.
917 Parameters:
918 poly: Polynomial coefficients (default None) to compute a
919 gain-elevation correction as a function of
920 elevation (in degrees).
921 filename: The name of an ascii file holding correction factors.
922 The first row of the ascii file must give the column
923 names and these MUST include columns
924 "ELEVATION" (degrees) and "FACTOR" (multiply data
925 by this) somewhere.
926 The second row must give the data type of the
927 column. Use 'R' for Real and 'I' for Integer.
928 An example file would be
929 (actual factors are arbitrary) :
930
931 TIME ELEVATION FACTOR
932 R R R
933 0.1 0 0.8
934 0.2 20 0.85
935 0.3 40 0.9
936 0.4 60 0.85
937 0.5 80 0.8
938 0.6 90 0.75
939 method: Interpolation method when correcting from a table.
940 Values are "nearest", "linear" (default), "cubic"
941 and "spline"
942 insitu: if False a new scantable is returned.
943 Otherwise, the scaling is done in-situ
944 The default is taken from .asaprc (False)
945 """
946
947 if insitu is None: insitu = rcParams['insitu']
948 self._math._setinsitu(insitu)
949 varlist = vars()
950 if poly is None:
951 poly = ()
952 from os.path import expandvars
953 filename = expandvars(filename)
954 s = scantable(self._math._gainel(self, poly, filename, method))
955 s._add_history("gain_el", varlist)
956 print_log()
957 if insitu: self._assign(s)
958 else: return s
959
960 def freq_align(self, reftime=None, method='cubic', insitu=None):
961 """
962 Return a scan where all rows have been aligned in frequency/velocity.
963 The alignment frequency frame (e.g. LSRK) is that set by function
964 set_freqframe.
965 Parameters:
966 reftime: reference time to align at. By default, the time of
967 the first row of data is used.
968 method: Interpolation method for regridding the spectra.
969 Choose from "nearest", "linear", "cubic" (default)
970 and "spline"
971 insitu: if False a new scantable is returned.
972 Otherwise, the scaling is done in-situ
973 The default is taken from .asaprc (False)
974 """
975 if insitu is None: insitu = rcParams["insitu"]
976 self._math._setinsitu(insitu)
977 varlist = vars()
978 if reftime is None: reftime = ""
979 s = scantable(self._math._freq_align(self, reftime, method))
980 s._add_history("freq_align", varlist)
981 print_log()
982 if insitu: self._assign(s)
983 else: return s
984
985 def opacity(self, tau, insitu=None):
986 """
987 Apply an opacity correction. The data
988 and Tsys are multiplied by the correction factor.
989 Parameters:
990 tau: Opacity from which the correction factor is
991 exp(tau*ZD)
992 where ZD is the zenith-distance
993 insitu: if False a new scantable is returned.
994 Otherwise, the scaling is done in-situ
995 The default is taken from .asaprc (False)
996 """
997 if insitu is None: insitu = rcParams['insitu']
998 self._math._setinsitu(insitu)
999 varlist = vars()
1000 s = scantable(self._math._opacity(self, tau))
1001 s._add_history("opacity", varlist)
1002 print_log()
1003 if insitu: self._assign(s)
1004 else: return s
1005
1006 def bin(self, width=5, insitu=None):
1007 """
1008 Return a scan where all spectra have been binned up.
1009 width: The bin width (default=5) in pixels
1010 insitu: if False a new scantable is returned.
1011 Otherwise, the scaling is done in-situ
1012 The default is taken from .asaprc (False)
1013 """
1014 if insitu is None: insitu = rcParams['insitu']
1015 self._math._setinsitu(insitu)
1016 varlist = vars()
1017 s = scantable(self._math._bin(self, width))
1018 s._add_history("bin", varlist)
1019 print_log()
1020 if insitu: self._assign(s)
1021 else: return s
1022
1023
1024 def resample(self, width=5, method='cubic', insitu=None):
1025 """
1026 Return a scan where all spectra have been binned up
1027 width: The bin width (default=5) in pixels
1028 method: Interpolation method to use when resampling.
1029 Values are "nearest", "linear", "cubic" (default)
1030 and "spline"
1031 insitu: if False a new scantable is returned.
1032 Otherwise, the scaling is done in-situ
1033 The default is taken from .asaprc (False)
1034 """
1035 if insitu is None: insitu = rcParams['insitu']
1036 self._math._setinsitu(insitu)
1037 varlist = vars()
1038 s = scantable(self._math._resample(self, method, width))
1039 s._add_history("resample", varlist)
1040 print_log()
1041 if insitu: self._assign(s)
1042 else: return s
1043
1044
1045 def average_pol(self, mask=None, weight='none'):
1046 """
1047 Average the Polarisations together.
1048 Parameters:
1049 mask: An optional mask defining the region, where the
1050 averaging will be applied. The output will have all
1051 specified points masked.
1052 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1053 weighted), or 'tsys' (1/Tsys**2 weighted)
1054 """
1055 varlist = vars()
1056 if mask is None:
1057 mask = ()
1058 s = scantable(self._math._averagepol(self, mask, weight.upper()))
1059 s._add_history("average_pol", varlist)
1060 print_log()
1061 return s
1062
1063 def average_beam(self, mask=None, weight='none'):
1064 """
1065 Average the Beams together.
1066 Parameters:
1067 mask: An optional mask defining the region, where the
1068 averaging will be applied. The output will have all
1069 specified points masked.
1070 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1071 weighted), or 'tsys' (1/Tsys**2 weighted)
1072 """
1073 varlist = vars()
1074 if mask is None:
1075 mask = ()
1076 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1077 s._add_history("average_beam", varlist)
1078 print_log()
1079 return s
1080
1081 def convert_pol(self, poltype=None):
1082 """
1083 Convert the data to a different polarisation type.
1084 Parameters:
1085 poltype: The new polarisation type. Valid types are:
1086 "linear", "stokes" and "circular"
1087 """
1088 varlist = vars()
1089 try:
1090 s = scantable(self._math._convertpol(self, poltype))
1091 except RuntimeError, msg:
1092 if rcParams['verbose']:
1093 print msg
1094 return
1095 else:
1096 raise
1097 s._add_history("convert_pol", varlist)
1098 print_log()
1099 return s
1100
1101 def smooth(self, kernel="hanning", width=5.0, insitu=None):
1102 """
1103 Smooth the spectrum by the specified kernel (conserving flux).
1104 Parameters:
1105 scan: The input scan
1106 kernel: The type of smoothing kernel. Select from
1107 'hanning' (default), 'gaussian' and 'boxcar'.
1108 The first three characters are sufficient.
1109 width: The width of the kernel in pixels. For 'hanning' this is
1110 ignored; otherwise it defaults to 5 pixels.
1111 For 'gaussian' it is the Full Width Half
1112 Maximum. For 'boxcar' it is the full width.
1113 insitu: if False a new scantable is returned.
1114 Otherwise, the scaling is done in-situ
1115 The default is taken from .asaprc (False)
1116 Example:
1117 none
1118 """
1119 if insitu is None: insitu = rcParams['insitu']
1120 self._math._setinsitu(insitu)
1121 varlist = vars()
1122 s = scantable(self._math._smooth(self, kernel.lower(), width))
1123 s._add_history("smooth", varlist)
1124 print_log()
1125 if insitu: self._assign(s)
1126 else: return s
1127
1128
1129 def poly_baseline(self, mask=None, order=0, plot=False, insitu=None):
1130 """
1131 Return a scan which has been baselined (all rows) by a polynomial.
1132 Parameters:
1133 scan: a scantable
1134 mask: an optional mask
1135 order: the order of the polynomial (default is 0)
1136 plot: plot the fit and the residual. In this case each
1137 individual fit has to be approved by typing 'y'
1138 or 'n'
1139 insitu: if False a new scantable is returned.
1140 Otherwise, the scaling is done in-situ
1141 The default is taken from .asaprc (False)
1142 Example:
1143 # return a scan baselined by a third order polynomial,
1144 # not using a mask
1145 bscan = scan.poly_baseline(order=3)
1146 """
1147 if insitu is None: insitu = rcParams['insitu']
1148 varlist = vars()
1149 if mask is None:
1150 mask = list(NUM.ones(self.nchan(-1)))
1151 from asap.asapfitter import fitter
1152 f = fitter()
1153 f.set_scan(self, mask)
1154 f.set_function(poly=order)
1155 s = f.auto_fit(insitu, plot=plot)
1156 s._add_history("poly_baseline", varlist)
1157 print_log()
1158 if insitu: self._assign(s)
1159 else: return s
1160
1161 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
1162 threshold=3, plot=False, insitu=None):
1163 """
1164 Return a scan which has been baselined (all rows) by a polynomial.
1165 Spectral lines are detected first using linefinder and masked out
1166 to avoid them affecting the baseline solution.
1167
1168 Parameters:
1169 mask: an optional mask retrieved from scantable
1170 edge: an optional number of channels to drop at
1171 the edge of the spectrum. If only one value is
1172 specified, the same number will be dropped from
1173 both sides of the spectrum. Default is to keep
1174 all channels. Nested tuples represent individual
1175 edge selection for different IFs (the number of spectral
1176 channels can differ)
1177 order: the order of the polynomial (default is 0)
1178 threshold: the threshold used by line finder. It is better to
1179 keep it large as only strong lines affect the
1180 baseline solution.
1181 plot: plot the fit and the residual. In this case each
1182 individual fit has to be approved by typing 'y'
1183 or 'n'
1184 insitu: if False a new scantable is returned.
1185 Otherwise, the scaling is done in-situ
1186 The default is taken from .asaprc (False)
1187
1188 Example:
1189 scan2=scan.auto_poly_baseline(order=7)
1190 """
1191 if insitu is None: insitu = rcParams['insitu']
1192 varlist = vars()
1193 from asap.asapfitter import fitter
1194 from asap.asaplinefind import linefinder
1195 from asap import _is_sequence_or_number as _is_valid
1196
1197 # check whether edge is set up for each IF individually
1198 individualedge = False;
1199 if len(edge) > 1:
1200 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1201 individualedge = True;
1202
1203 if not _is_valid(edge, int) and not individualedge:
1204 raise ValueError, "Parameter 'edge' has to be an integer or a \
1205 pair of integers specified as a tuple. Nested tuples are allowed \
1206 to make individual selection for different IFs."
1207
1208 curedge = (0, 0)
1209 if individualedge:
1210 for edgepar in edge:
1211 if not _is_valid(edgepar, int):
1212 raise ValueError, "Each element of the 'edge' tuple has \
1213 to be a pair of integers or an integer."
1214 else:
1215 curedge = edge;
1216
1217 # setup fitter
1218 f = fitter()
1219 f.set_function(poly=order)
1220
1221 # setup line finder
1222 fl = linefinder()
1223 fl.set_options(threshold=threshold)
1224
1225 if not insitu:
1226 workscan = self.copy()
1227 else:
1228 workscan = self
1229
1230 fl.set_scan(workscan)
1231
1232 rows = range(workscan.nrow())
1233 asaplog.push("Processing:")
1234 for r in rows:
1235 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1236 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1237 workscan.getpol(r), workscan.getcycle(r))
1238 asaplog.push(msg, False)
1239
1240 # figure out edge parameter
1241 if individualedge:
1242 if len(edge) <= workscan.getif(r):
1243 raise RuntimeError, "Number of edge elements appears to " \
1244 "be less than the number of IFs"
1245 curedge = edge[workscan.getif(r)]
1246
1247 # find the spectral lines and get a fit mask that excludes them
1248 fl.find_lines(r, mask, curedge)
1249 f.set_scan(workscan, fl.get_mask())
1250 f.x = workscan._getabcissa(r)
1251 f.y = workscan._getspectrum(r)
1252 f.data = None
1253 f.fit()
1254 x = f.get_parameters()
1255 if plot:
1256 f.plot(residual=True)
1257 x = raw_input("Accept fit ( [y]/n ): ")
1258 if x.upper() == 'N':
1259 continue
1260 workscan._setspectrum(f.fitter.getresidual(), r)
1261 if plot:
1262 f._p.unmap()
1263 f._p = None
1264 workscan._add_history("auto_poly_baseline", varlist)
1265 if insitu:
1266 self._assign(workscan)
1267 else:
1268 return workscan
1269
1270 def rotate_linpolphase(self, angle):
1271 """
1272 Rotate the phase of the complex polarization O=Q+iU correlation.
1273 This is always done in situ in the raw data. So if you call this
1274 function more than once then each call rotates the phase further.
1275 Parameters:
1276 angle: The angle (degrees) to rotate (add) by.
1277 Examples:
1278 scan.rotate_linpolphase(2.3)
1279 """
1280 varlist = vars()
1281 self._math._rotate_linpolphase(self, angle)
1282 self._add_history("rotate_linpolphase", varlist)
1283 print_log()
1284 return
1285
1286
1287 def rotate_xyphase(self, angle):
1288 """
1289 Rotate the phase of the XY correlation. This is always done in situ
1290 in the data. So if you call this function more than once
1291 then each call rotates the phase further.
1292 Parameters:
1293 angle: The angle (degrees) to rotate (add) by.
1294 Examples:
1295 scan.rotate_xyphase(2.3)
1296 """
1297 varlist = vars()
1298 self._math._rotate_xyphase(self, angle)
1299 self._add_history("rotate_xyphase", varlist)
1300 print_log()
1301 return
1302
1303 def swap_linears(self):
1304 """
1305 Swap the linear polarisations XX and YY
1306 """
1307 varlist = vars()
1308 self._math._swap_linears(self)
1309 self._add_history("swap_linears", varlist)
1310 print_log()
1311 return
1312
1313 def invert_phase(self):
1314 """
1315 Invert the phase of the complex polarisation
1316 """
1317 varlist = vars()
1318 self._math._invert_phase(self)
1319 self._add_history("invert_phase", varlist)
1320 print_log()
1321 return
1322
1323 def add(self, offset, insitu=None):
1324 """
1325 Return a scan where all spectra have the offset added
1326 Parameters:
1327 offset: the offset
1328 insitu: if False a new scantable is returned.
1329 Otherwise, the scaling is done in-situ
1330 The default is taken from .asaprc (False)
1331 """
1332 if insitu is None: insitu = rcParams['insitu']
1333 self._math._setinsitu(insitu)
1334 varlist = vars()
1335 s = scantable(self._math._unaryop(self, offset, "ADD", False))
1336 s._add_history("add", varlist)
1337 print_log()
1338 if insitu:
1339 self._assign(s)
1340 else:
1341 return s
1342
1343 def scale(self, factor, tsys=True, insitu=None, ):
1344 """
1345 Return a scan where all spectra are scaled by the given 'factor'
1346 Parameters:
1347 factor: the scaling factor
1348 insitu: if False a new scantable is returned.
1349 Otherwise, the scaling is done in-situ
1350 The default is taken from .asaprc (False)
1351 tsys: if True (default) then apply the operation to Tsys
1352 as well as the data
1353 """
1354 if insitu is None: insitu = rcParams['insitu']
1355 self._math._setinsitu(insitu)
1356 varlist = vars()
1357 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
1358 s._add_history("scale", varlist)
1359 print_log()
1360 if insitu:
1361 self._assign(s)
1362 else:
1363 return s
1364
1365 def auto_quotient(self, mode='time', preserve=True):
1366 """
1367 This function allows one to build quotients automatically.
1368 It assumes the observation to have the same number of
1369 "ons" and "offs".
1370 It will support "closest off in time" in the future
1371 Parameters:
1372 mode: the on/off detection mode; currently only 'time'
1373 (the default) is accepted. Detecting 'off' scans by a
1374 trailing '_R' (Mopra/Parkes) or '_e'/'_w' (Tid)
1375 suffix is not supported yet.
1376 preserve: you can preserve (default) the continuum or
1377 remove it. The equations used are
1378 preserve: Output = Toff * (on/off) - Toff
1379 remove: Output = Toff * (on/off) - Ton
1380 """
1381 modes = ["time"]
1382 if not mode in modes:
1383 msg = "please provide valid mode. Valid modes are %s" % (modes)
1384 raise ValueError(msg)
1385 varlist = vars()
1386 s = scantable(self._math._auto_quotient(self, mode, preserve))
1387 s._add_history("auto_quotient", varlist)
1388 print_log()
1389 return s
1390
1391 def mx_quotient(self, mask = None, weight='median', preserve=True):
1392 """
1393 Form a quotient using "off" beams when observing in "MX" mode.
1394 Parameters:
1395 mask: an optional mask to be used when weight == 'stddev'
1396 weight: How to average the off beams. Default is 'median'.
1397 preserve: you can preserve (default) the continuum or
1398 remove it. The equations used are
1399 preserve: Output = Toff * (on/off) - Toff
1400 remove: Output = Toff * (on/off) - Ton
1401 """
1402 if mask is None: mask = ()
1403 varlist = vars()
1404 on = scantable(self._math._mx_extract(self, 'on'))
1405 preoff = scantable(self._math._mx_extract(self, 'off'))
1406 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
1407 from asapmath import quotient
1408 q = quotient(on, off, preserve)
1409 q._add_history("mx_quotient", varlist)
1410 print_log()
1411 return q
1412
1413 def freq_switch(self, insitu=None):
1414 """
1415 Apply frequency switching to the data.
1416 Parameters:
1417 insitu: if False a new scantable is returned.
1418 Otherwise, the switching is done in-situ
1419 The default is taken from .asaprc (False)
1420 Example:
1421 none
1422 """
1423 if insitu is None: insitu = rcParams['insitu']
1424 self._math._setinsitu(insitu)
1425 varlist = vars()
1426 s = scantable(self._math._freqswitch(self))
1427 s._add_history("freq_switch", varlist)
1428 print_log()
1429 if insitu: self._assign(s)
1430 else: return s
1431
1432 def recalc_azel(self):
1433 """
1434 Recalculate the azimuth and elevation for each position.
1435 Parameters:
1436 none
1437 Example:
1438 """
1439 varlist = vars()
1440 self._recalcazel()
1441 self._add_history("recalc_azel", varlist)
1442 print_log()
1443 return
1444
1445 def __add__(self, other):
1446 varlist = vars()
1447 s = None
1448 if isinstance(other, scantable):
1449 print "scantable + scantable NYI"
1450 return
1451 elif isinstance(other, float):
1452 s = scantable(self._math._unaryop(self, other, "ADD", False))
1453 else:
1454 raise TypeError("Other input is not a scantable or float value")
1455 s._add_history("operator +", varlist)
1456 print_log()
1457 return s
1458
1459 def __sub__(self, other):
1460 """
1461 implicit on all axes and on Tsys
1462 """
1463 varlist = vars()
1464 s = None
1465 if isinstance(other, scantable):
1466 print "scantable - scantable NYI"
1467 return
1468 elif isinstance(other, float):
1469 s = scantable(self._math._unaryop(self, other, "SUB", False))
1470 else:
1471 raise TypeError("Other input is not a scantable or float value")
1472 s._add_history("operator -", varlist)
1473 print_log()
1474 return s
1475
1476 def __mul__(self, other):
1477 """
1478 implicit on all axes and on Tsys
1479 """
1480 varlist = vars()
1481 s = None
1482 if isinstance(other, scantable):
1483 print "scantable * scantable NYI"
1484 return
1485 elif isinstance(other, float):
1486 s = scantable(self._math._unaryop(self, other, "MUL", False))
1487 else:
1488 raise TypeError("Other input is not a scantable or float value")
1489 s._add_history("operator *", varlist)
1490 print_log()
1491 return s
1492
1493
1494 def __div__(self, other):
1495 """
1496 implicit on all axes and on Tsys
1497 """
1498 varlist = vars()
1499 s = None
1500 if isinstance(other, scantable):
1501 print "scantable / scantable NYI"
1502 return
1503 elif isinstance(other, float):
1504 if other == 0.0:
1505 raise ZeroDivisionError("Dividing by zero is not recommended")
1506 s = scantable(self._math._unaryop(self, other, "DIV", False))
1507 else:
1508 raise TypeError("Other input is not a scantable or float value")
1509 s._add_history("operator /", varlist)
1510 print_log()
1511 return s
1512
1513 def get_fit(self, row=0):
1514 """
1515 Print or return the stored fits for a row in the scantable
1516 Parameters:
1517 row: the row which the fit has been applied to.
1518 """
1519 if row >= self.nrow():
1520 return
1521 from asap.asapfit import asapfit
1522 fit = asapfit(self._getfit(row))
1523 if rcParams['verbose']:
1524 print fit
1525 return
1526 else:
1527 return fit.as_dict()
1528
1529 def _add_history(self, funcname, parameters):
1530 # create date
1531 sep = "##"
1532 from datetime import datetime
1533 dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
1534 hist = dstr+sep
1535 hist += funcname+sep#cdate+sep
1536 if parameters.has_key('self'): del parameters['self']
1537 for k, v in parameters.iteritems():
1538 if type(v) is dict:
1539 for k2, v2 in v.iteritems():
1540 hist += k2
1541 hist += "="
1542 if isinstance(v2, scantable):
1543 hist += 'scantable'
1544 elif k2 == 'mask':
1545 if isinstance(v2, list) or isinstance(v2, tuple):
1546 hist += str(self._zip_mask(v2))
1547 else:
1548 hist += str(v2)
1549 else:
1550 hist += str(v2)
1551 else:
1552 hist += k
1553 hist += "="
1554 if isinstance(v, scantable):
1555 hist += 'scantable'
1556 elif k == 'mask':
1557 if isinstance(v, list) or isinstance(v, tuple):
1558 hist += str(self._zip_mask(v))
1559 else:
1560 hist += str(v)
1561 else:
1562 hist += str(v)
1563 hist += sep
1564 hist = hist[:-2] # remove trailing '##'
1565 self._addhistory(hist)
1566
1567
1568 def _zip_mask(self, mask):
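# Compress a channel mask (sequence of 0/1 flags) into a list of
# [start, end) pairs describing the runs of 1s, for compact storage
# in the history (see _add_history).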
1569 mask = list(mask)
1570 i = 0
1571 segments = []
1572 while mask[i:].count(1):
1573 i += mask[i:].index(1)
1574 if mask[i:].count(0):
1575 j = i + mask[i:].index(0)
1576 else:
1577 j = len(mask)
1578 segments.append([i, j])
1579 i = j
1580 return segments
1581
1582 def _get_ordinate_label(self):
1583 fu = "("+self.get_fluxunit()+")"
1584 import re
1585 lbl = "Intensity"
1586 if re.match(".K.", fu):
1587 lbl = "Brightness Temperature "+ fu
1588 elif re.match(".Jy.", fu):
1589 lbl = "Flux density "+ fu
1590 return lbl
1591
1592 def _check_ifs(self):
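# Return True when every IF (that has channels) contains the same
# number of channels, i.e. a single channel mask is valid for all IFs.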
1593 nchans = [self.nchan(i) for i in range(self.nif(-1))]
1594 nchans = filter(lambda t: t > 0, nchans)
1595 return nchans.count(nchans[0]) == len(nchans)
1596
1597 def _fill(self, names, unit, average):
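# Import one or more raw data files via stfiller, optionally
# scan-averaging the integrations, and merge successive tables into
# this scantable.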
1598 import os
1599 from asap._asap import stfiller
1600 first = True
1601 fullnames = []
1602 for name in names:
1603 name = os.path.expandvars(name)
1604 name = os.path.expanduser(name)
1605 if not os.path.exists(name):
1606 msg = "File '%s' does not exists" % (name)
1607 if rcParams['verbose']:
1608 asaplog.push(msg)
1609 print asaplog.pop().strip()
1610 return
1611 raise IOError(msg)
1612 fullnames.append(name)
1613 if average:
1614 asaplog.push('Auto averaging integrations')
1615 stype = int(rcParams['scantable.storage'].lower() == 'disk')
1616 for name in fullnames:
1617 tbl = Scantable(stype)
1618 r = stfiller(tbl)
1619 msg = "Importing %s..." % (name)
1620 asaplog.push(msg, False)
1621 print_log()
1622 r._open(name, -1, -1)
1623 r._read()
1624 #tbl = r._getdata()
1625 if average:
1626 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
1627 #tbl = tbl2
1628 if not first:
1629 tbl = self._math._merge([self, tbl])
1630 #tbl = tbl2
1631 Scantable.__init__(self, tbl)
1632 r._close()
1633 del r, tbl
1634 first = False
1635 if unit is not None:
1636 self.set_fluxunit(unit)
1637 self.set_freqframe(rcParams['scantable.freqframe'])
1638 #self._add_history("scantable", varlist)
1639
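# A minimal illustrative session using the methods above ('mydata.rpf'
# and the output name are placeholders, not files shipped with asap):
#
# from asap import scantable
# scan = scantable('mydata.rpf', average=True) # import, scan-average
# scan.set_unit('channel')
# msk = scan.create_mask([100, 400]) # include channels 100-400
# print scan.stats(stat='rms', mask=msk)
# q = scan.auto_quotient() # form the on/off quotient
# bl = q.poly_baseline(order=1, insitu=False) # subtract a linear baseline
# bl.save('mydata_reduced.asap', overwrite=True)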